计算Java中ping数据包的MIN,MAX和RTT

时间:2020-01-24 02:39:38

标签: java

我一直在研究模拟UDP ping客户端/服务器活动的任务。部分分配要求我们计算发送的所有数据包(其中的10个)的最小、最大和平均往返时间(RTT)。我撞到了砖墙。使用 java.lang.Math 类会更容易吗?还是创建一个for循环?老实说,我被困住了。请帮忙。

public class PingClient
{
    // Receive timeout per ping, in milliseconds.
    private static final int PING_TIMEOUT = 1000;

    // Number of ping packets to send.
    private static final int PING_COUNT = 10;

    /**
     * Sends {@value #PING_COUNT} UDP ping packets to the given host/port,
     * prints the round-trip delay of each reply, and finally prints the
     * minimum, maximum, and average RTT over the replies that arrived.
     *
     * <p>Usage: {@code java PingClient <host> <port>}
     *
     * @param args {@code args[0]} = host name or IP, {@code args[1]} = UDP port
     * @throws IOException if the socket cannot be created or a send fails
     */
    public static void main(String[] args) throws IOException
    {
        // Both host and port are required on the command line.
        if (args.length != 2) {
            System.out.println("Required arguments: host port");
            return;
        }

        // Server the program will ping, and the port to reach it on.
        InetAddress server = InetAddress.getByName(args[0]);
        int port = Integer.parseInt(args[1]);

        long pkgsent = 0;
        long pkgreceived = 0;
        long pkglost = 0;
        String ipaddress = "";

        // One RTT sample per reply; min/max/average are computed from this
        // list after the loop, which answers the original question.
        List<Long> rtts = new ArrayList<>();

        // try-with-resources guarantees the socket is closed even on error.
        try (DatagramSocket socket = new DatagramSocket()) {
            // One timeout setting covers every receive() below; no need to
            // re-set it on each iteration.
            socket.setSoTimeout(PING_TIMEOUT);

            for (int sequenceNumber = 0; sequenceNumber < PING_COUNT; sequenceNumber++) {
                // Timestamp taken just before sending; used to compute the RTT.
                long msgsent = System.currentTimeMillis();

                // Payload identifying this ping.
                String str = "udp_seq=" + sequenceNumber + "Time: " + msgsent + " \n";
                byte[] buf = str.getBytes(StandardCharsets.UTF_8);

                // Send the ping datagram to the server.
                DatagramPacket ping = new DatagramPacket(buf, buf.length, server, port);
                socket.send(ping);
                pkgsent++;

                try {
                    // Attempt to receive the reply; throws SocketTimeoutException
                    // (an IOException) if nothing arrives within PING_TIMEOUT ms.
                    DatagramPacket response = new DatagramPacket(new byte[1024], 1024);
                    socket.receive(response);
                    long msgreceived = System.currentTimeMillis();

                    pkgreceived++;
                    ipaddress = ping.getAddress().getHostAddress();

                    // Round-trip delay for this packet, recorded for the stats.
                    long packetdelay = msgreceived - msgsent;
                    rtts.add(packetdelay);

                    System.out.println("Reply from " + ipaddress
                            + ": seq=" + sequenceNumber + " time=" + packetdelay + " ms");
                } catch (IOException e) {
                    // No reply within the timeout: count the packet as lost.
                    System.out.println("Packet " + sequenceNumber + " timed out");
                    pkglost++;
                }
            }
        }

        // Print the ping statistics (with the spaces the original was missing).
        System.out.println();
        System.out.println("--- " + ipaddress + " ping statistics ---");
        System.out.println(pkgsent + " packets transmitted, " + pkgreceived
                + " packets received, " + pkglost + " packets lost");

        long[] stats = computeStats(rtts);
        System.out.println("minimum: " + stats[0] + " ms, maximum: " + stats[1]
                + " ms, average RTT: " + stats[2] + " ms");
    }

    /**
     * Computes {minimum, maximum, average} of the given RTT samples.
     *
     * @param rtts round-trip times in milliseconds, one per received reply
     * @return a 3-element array {min, max, avg}; all zeros when {@code rtts}
     *         is empty so the caller can print stats without dividing by zero
     */
    static long[] computeStats(List<Long> rtts)
    {
        if (rtts.isEmpty()) {
            return new long[] {0L, 0L, 0L};
        }
        long min = Long.MAX_VALUE;
        long max = Long.MIN_VALUE;
        long sum = 0;
        for (long rtt : rtts) {
            min = Math.min(min, rtt);
            max = Math.max(max, rtt);
            sum += rtt;
        }
        return new long[] {min, max, sum / rtts.size()};
    }
}

0 个答案:

没有答案