Need help setting up mixed traffic

Nagaraja Gundurao Nagaraja_Gundurao at symantec.com
Mon Aug 7 23:57:53 UTC 2017


Hi,
  I am trying to achieve something like this:

                +---- const(5KB) ----\
Client(WPG) ----|                     |---- proxy ---- Server(WPG)
                +---- CDB traffic ---/

client.pg                                               server.pg


This is what I plan to achieve:

1.  The client.pg file defines two robots, R1 for const(5KB) and R2 for CDB traffic (realistic content simulation).

2.  On the server side, server.pg defines two servers, one to serve the const(5KB) content and one to serve the CDB content (see the sketch after this list).
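Condensed, the plan looks roughly like this (a sketch only: the S1/S2/R1/R2 names match the plan above, but the ports and the cdb path are placeholders; my real files follow further below):

Content cntConst = { size = const(5KB); };
Content cntCdb = { content_db = "/path/to/content.cdb"; }; // placeholder path

Server S1 = { // serves the constant-size objects
    kind = "S101";
    contents = [ cntConst ];
    direct_access = contents;
    addresses = [ '10.0.15.60:8081' ]; // placeholder port
};
Server S2 = { // serves the CDB-based realistic content
    kind = "S101";
    contents = [ cntCdb ];
    direct_access = contents;
    addresses = [ '10.0.15.60:8082' ]; // placeholder port
};

Robot R1 = { origins = S1.addresses; addresses = [ '10.0.15.105' ]; };
Robot R2 = { origins = S2.addresses; addresses = [ '10.0.15.105' ]; };

use(S1, S2, R1, R2);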

Problem: When I initiate the traffic, I see only one of the two. For example, if the entry in server.pg is use(S1, S2), where S1 is the const(5KB) server and S2 is the CDB server, then I see only the 5KB traffic. If I swap the entries in the use() call to use(S2, S1), I see only CDB traffic and no const(5KB). At no time did I see both kinds of traffic come through the proxy.
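In use() terms, with S1 the const(5KB) server and S2 the CDB server:

use(S1, S2);   // only const(5KB) traffic crosses the proxy
use(S2, S1);   // only CDB traffic crosses the proxy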

I am listing some of the errors further below, and I would also request that you send me an example file, if this kind of configuration is valid.

client.pg

/*
 * A very simple "Hello, World!" workload
 */

// this is just one of the simplest workloads that can produce hits
// never use this workload for benchmarking


// SimpleContent defines properties of content that the server generates;
// if you get no hits, set SimpleContent.obj_life_cycle to cntStatic, which
// is defined in workloads/include/contents.pg

Content SimpleContent = {
    size = const(64KB);
    cachable = 80%;   // 20% of content is uncachable
};

AddrMap M = {
    names = [ 'www.dropbox.com' ];
    addresses = [ '10.0.15.60:443' ];
    //addresses = S.addresses;
    //names = tracedHosts(R.foreign_trace);
};
DnsResolver dr = {
    servers = [ '10.0.15.60:53' ];
    timeout = 5sec;
};

SslWrap wrap = {
    protocols = [ "any" ];
    root_certificate = "/home/xxx/xx.pem";
    //ciphers = [ "ALL:HIGH:" : 100% ];
    ciphers = [ "ALL:!DES-CBC-SHA:!EXP-DES-CBC-SHA:!EXP-RC4-MD5:!EXP-RC2-CBC-MD5:" : 100% ];
    rsa_key_sizes = [ 512bit, 1024bit, 2048bit ];
    session_resumption = 40%;
    session_cache = 100;
    verify_peer_certificate = false;
};
use(M);
// a primitive server cleverly labeled "S101"
// normally, you would specify more properties,
// but we will mostly rely on defaults for now
Server S = {
    kind = "S101";
    contents = [ SimpleContent ];
    direct_access = contents;
    addresses = [ '10.0.15.60:443' ]; // where to create these server agents
    ssl_wraps = [ wrap ];
};

// a primitive robot
Robot R = {
    kind = "R101";

    interests = [ "foreign" ];
    foreign_trace = "/home/xx/xx.log";
    pop_model = { pop_distr = popUnif(); };
    recurrence = 55% / SimpleContent.cachable; // adjusted to get 55% DHR
    origins = S.addresses; // where the origin servers are
    dns_resolver = dr;
    ssl_wraps = [ wrap ];

    MimeHeader user1 = 'ELASTICA_MAGIC_COOKIE: 280509165510:xx.user1@xx';
    MimeHeader Host = 'Host: drive.google.com';
    MimeHeader User_Agent = 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:53.0) Gecko/20100101 Firefox/53.0';
    MimeHeader Accept = 'Accept: */*';
    MimeHeader Accept_Language = 'Accept-Language: en-US,en;q=0.5';
    MimeHeader Accept_Encoding = 'Accept-Encoding: gzip, deflate, br';
    // only MimeHeaders defined above can be listed here
    http_headers = [[user1, Host, User_Agent, Accept, Accept_Language, Accept_Encoding]: 100%];
    addresses = [ '10.0.15.105' ** 1 ]; // where these robot agents will be created
    //req_rate = 0.1/sec;
};
Robot R1 = {
    pop_model = { pop_distr = popUnif(); };
    recurrence = 55% / SimpleContent.cachable; // adjusted to get 55% DHR
    origins = M.names; // where the origin servers are
    dns_resolver = dr;
    ssl_wraps = [ wrap ];
    //session.busy_period.duration = 1sec;
    //session.idle_period_duration = exp(11sec);

    MimeHeader user1 = 'MAGIC_COOKIE: 280509165510:xxuser1@xx.com';
    http_headers = [user1: 100%];
    addresses = [ '10.0.15.105' ** 7 ]; // where these robot agents will be created
};
Phase phRampUp = { name = "rampup"; goal.duration = 5min; populus_factor_beg = 0; populus_factor_end = 1;};
Phase phRampDown = { name = "rampdown"; goal.duration = 10sec; populus_factor_beg = 1; populus_factor_end = 0;};
Phase phSustain = { name = "sustain"; goal.duration = 60min; populus_factor_beg = 1; populus_factor_end = 1;};


schedule(phRampUp,
         phSustain);

use(S,R1,R);

server.pg

/*
 * A very simple "Hello, World!" workload
 */

// this is just one of the simplest workloads that can produce hits
// never use this workload for benchmarking


// SimpleContent defines properties of content that the server generates;
// if you get no hits, set SimpleContent.obj_life_cycle to cntStatic, which
// is defined in workloads/include/contents.pg

Content SimpleContent = {
    //size = const(64KB);
    content_db = "/home/yy/yy.cdb";
    cachable = 80%;   // 20% of content is uncachable
};
Content SimpleContent1 = {
    size = const(5KB);
    cachable = 80%;   // 20% of content is uncachable
};

DnsResolver dr = {
    servers = [ '10.0.15.60:53' ];
    timeout = 5sec;
};

SslWrap wrap = {
    protocols = [ "any" ];
    root_certificate = "/yy/yy.pem";
    //ciphers = [ "ALL:HIGH:" : 100% ];
    ciphers = [ "ALL:!DES-CBC-SHA:!EXP-DES-CBC-SHA:!EXP-RC4-MD5:!EXP-RC2-CBC-MD5:" : 100% ];
    rsa_key_sizes = [ 512bit, 1024bit, 2048bit ];
    session_resumption = 40%;
    session_cache = 100;
    verify_peer_certificate = false;
};

// a primitive server cleverly labeled "S101"
// normally, you would specify more properties,
// but we will mostly rely on defaults for now
Server S = {
    kind = "S101";
    contents      = [ SimpleContent : 70%, SimpleContent1 : 30% ];
    direct_access = contents;
    addresses = [ '10.0.15.60:443' ]; // where to create these server agents
    ssl_wraps = [ wrap ];
};

// a primitive robot
Robot R = {
    kind = "R101";

    interests = [ "foreign" ];
    foreign_trace = "/home/yy/yy.log";
    pop_model = { pop_distr = popUnif(); };
    recurrence = 55% / SimpleContent.cachable; // adjusted to get 55% DHR
    origins = S.addresses; // where the origin servers are
    dns_resolver = dr;
    ssl_wraps = [ wrap ];

    MimeHeader user1 = 'MAGIC_COOKIE: 666923300190:yy.user1@yy';
    MimeHeader Host = 'Host: drive.google.com';
    //MimeHeader User_Agent = 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:40.0) Gecko/20100101 Firefox/40.0';
    //MimeHeader Accept = 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8';
    //MimeHeader Accept_Language = 'Accept-Language: en-US,en;q=0.5';

    http_headers = [user1: 100%];
    addresses = [ '10.0.15.105' ** 1 ]; // where these robot agents will be created
    req_rate = 0.1/sec;
};

// a 1:1 map
AddrMap M = {
    //names = [ 'www.drive.google.com', 'dropbox.com' ];
    //addresses = [ '10.0.15.60:80', '10.0.15.60:443' ];
    addresses = S.addresses;
    names = tracedHosts(R.foreign_trace);
};

Phase phRampUp = { name = "rampup"; goal.duration = 10sec; populus_factor_beg = 0; populus_factor_end = 1;};
Phase phRampDown = { name = "rampdown"; goal.duration = 10sec; populus_factor_beg = 1; populus_factor_end = 0;};
Phase phSustain = { name = "sustain"; goal.duration = 60min; populus_factor_beg = 1; populus_factor_end = 1;};


// build schedule using some well-known phases and phases defined above
schedule(phRampUp,
         phSustain);

//use(M);
use(S);
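In case the start-up matters, both sides are launched with the standard Polygraph binaries, each pointed at its own file (all other command-line options omitted here):

polygraph-server --config server.pg
polygraph-client --config client.pg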

Errors
