[Twisted-Python] DeferredList?

Don Smith donwsmith at gmail.com
Tue Feb 26 15:41:56 MST 2008


Greetings,

I have a problem I hope someone here can assist with. I'm using TwistedSNMP
to query a bunch of SNMP devices asynchronously. The behavior I'm trying to
get is to get is to query all the devices via SNMP, each query returns a
deferred, and when all their callbacks have been fired then I want to stop
the reactor and thereby exit the program. I thought perhaps I could put each
of these SNMP deferreds in a DeferredList and add a callback to the
DeferredList that would stop the reactor but it does not do that. Enclosed
is a code sample. Am I doing something incorrectly, or should I do something
different?

Thanks! -Don

"""Trivial example to retrieve an OID from a remote Agent"""
from twisted.internet import reactor
from twistedsnmp import snmpprotocol, agentproxy
from twisted.enterprise import adbapi
from twisted.internet import defer
import os

APPNAME = 'ClearSNMP'
d_results = {} #dictionary to store results
device_name = 'Unknown'

db_conn = {'user':'sa',
            'password':'password',
            'host':'localhost',
            'database':'ClearSNMP'
            }

community_string = 'public'

deferred_list = [] #list to help group the snmp requests

outfile = open('outfile.csv','wb',0)
#add headers to the outfile
outfile.write
("device_name,link_oid,link_name,link_capacity_oid,link_capacity,traffic_in_oid,traffic_in,traffic_out_oid,traffic_out")

#create the database connection pool
dbpool = adbapi.ConnectionPool("pymssql", user=db_conn['user'],
password=db_conn['password'], host=db_conn['host'],
database=db_conn['database'])


def main( class_handler, proxy, oids ):
    """Do a getTable on proxy for OIDs and store in oidStore"""
    df = proxy.getTable(
        oids, timeout=.25, retryCount=5
    )
    if class_handler == 'tasman':
        df.addCallback( tasmanResults )
    else:
        df.addCallback( results )
    #df.addCallback( exiter )
    df.addErrback( errorReporter, proxy )
    #df.addErrback( exiter )
    return df


def tasmanResults( result ):

    """Results 'appear' to be a nested dictionary, but it is really an
object of OIDs. I figured out how to get to the OIDs by
    casting them as a dictionary using the built_in dict() function. Now I
can iterate over all the OIDs."""
    #print 'Results:'
    d_table_key = {}
    for table_key in result.keys():
        #get the device name. for some reason i have to do this in a loop as
just saying dict(result[table_key])['.1.3.6.1.2.1.1.5.0'] doesn't work
        for oid in dict(result[table_key]).keys():
            if oid=='.1.3.6.1.2.1.1.5.0':
                device_name=str(dict(result[table_key])[oid])

        d_oid = {}
        for oid in dict(result[table_key]).keys():
            d_oid[str(oid)]=dict(result[table_key])[oid]
        d_table_key[str(table_key)] = d_oid
    d_results[device_name]=d_table_key

    #specify the table oids so we can match them appropriately later
    link_name_table    = ".1.3.6.1.2.1.2.2.1.2"
    link_capacity_table    = ".1.3.6.1.2.1.2.2.1.5"
    traffic_in_table    = ".1.3.6.1.2.1.2.2.1.10"
    traffic_out_table    = ".1.3.6.1.2.1.2.2.1.16"

    # For each link name in the table I need to get the values from the
link_capacity, traffic_in and traffic_out tables and put them in the same
line

    for i in d_results.keys():
        d_row = {} #holds the column values for a row

        #set device_name in Row
        d_row['device_name'] = device_name

        for k in d_results[i][link_name_table]:
            #set link_oid and link_name in Row
            d_row['link_oid'] = k
            d_row['link_name'] =
d_results[i][link_name_table][d_row['link_oid']]

            #lookup the capacity metric for this link_oid
            #create the oid to lookup
            d_row['link_capacity_oid'] =
d_row['link_oid'].replace(link_name_table,link_capacity_table)
            d_row['link_capacity'] =
d_results[i][link_capacity_table][d_row['link_capacity_oid']]

            #lookup the traffic_in metric for this link_oid
            #create the oid to lookup
            d_row['traffic_in_oid'] =
d_row['link_oid'].replace(link_name_table,traffic_in_table)
            d_row['traffic_in'] =
d_results[i][traffic_in_table][d_row['traffic_in_oid']]

            #lookup the traffic_out metric for this link_oid
            #create the oid to lookup
            d_row['traffic_out_oid'] =
d_row['link_oid'].replace(link_name_table,traffic_out_table)
            d_row['traffic_out'] =
d_results[i][traffic_out_table][d_row['traffic_out_oid']]

            #Calculate Utilization - if we can


            #print d_row
            out =
d_row['device_name']+","+d_row['link_oid']+","+d_row['link_name']+","+d_row['link_capacity_oid']+","+str(d_row['link_capacity'])+","+d_row['traffic_in_oid']+","+str(d_row['traffic_in'])+","+d_row['traffic_out_oid']+","+str(d_row['traffic_out'])+'\r\n'
            #print out
            outfile.write(out)
    return result


def errorReporter( err, proxy ):
    #print 'ERROR', err.getTraceback()
    #log the failed snmp query attempt
    print 'Failed to retrieve SNMP counters from agent:',proxy
    return err

def exiter( value ):

    reactor.stop()
    outfile.close()

    return value


def getNetworkElements():
    return dbpool.runQuery("select top 10 ip, mkt, dns_name, dns_fqdn from
dns where dns_type='TASMAN'")

def printResult(l):
    for item in l:
        print "Fetching counters for "+item[2]
        #deferred_list.append(snmpSetup(item[0], 161, 'ctipublic','tasman'))
    ipAddress = item[0]
    portno = 161
    community = community_string
    class_handler = 'tasman'
        print ipAddress,portno
        # choose random port in range 25000 to 30000
        port = snmpprotocol.port()
        targetPort = int(portno)
        proxy = agentproxy.AgentProxy(ipAddress,
                  targetPort,
                  community = community,
                  snmpVersion = 'v1',
                  protocol = port.protocol,
                  )


        d_oids = {'.1.3.6.1.2.1.1':"System Tables",
            '.1.3.6.1.2.1.2.2.1.2':"Circuit Name",
            '.1.3.6.1.2.1.2.2.1.5':"Capacity",
            '.1.3.6.1.2.1.2.2.1.10':"Traffic In",
            '.1.3.6.1.2.1.2.2.1.16':"Traffic Out"
            }

    """Do a getTable on proxy for OIDs and store in oidStore"""
    df = proxy.getTable(
        d_oids, timeout=.25, retryCount=5
    )
    if class_handler == 'tasman':
        df.addCallback( tasmanResults )
    else:
        df.addCallback( results )
    df.addErrback( errorReporter, proxy )
    deferred_list.append(df)
    return



if __name__ == "__main__":
    import sys
    #start the log service
    from twisted.python import log
    from twisted.python import logfile
    # rotate every 100000000 bytes
    f = logfile.LogFile(APPNAME+str(os.getpid())+".log", "Logs",
rotateLength=100000000)
    # setup logging to use our new logfile
    #log.startLogging(f)


    g = getNetworkElements().addCallback(printResult)
    dl = defer.DeferredList(deferred_list, 0, 0, 1 )
    print dir(dl)
    dl.addCallback(exiter)

    reactor.run()
-------------- next part --------------
An HTML attachment was scrubbed...
URL: http://twistedmatrix.com/pipermail/twisted-python/attachments/20080226/dcde14a6/attachment.htm 


More information about the Twisted-Python mailing list