Bug 2339 - provide user documentation for physix
- the clusteruserguide page is now generic: it generates the user guide for both simpatix and physix
- the clusteruserguide page is now updated automatically: it generates the list of nodes and their gflops dynamically by reading the output of qstat and by querying the SimpaDb.sql database (the classes originally written for clusterstatusview are reused)
This commit is contained in:
parent 2dbf8ceea0
commit 64813cb35a
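In concrete terms, the mechanism described in the commit message could look roughly like the sketch below (not the actual clusteruserguide code: the way qstat is invoked and the getMachineNames accessor are assumptions; QstatParser, JobsState and the Inventory getters appear in the diffs that follow):

    import subprocess

    def build_node_rows(qstat_parser, inventory):
        # capture the scheduler's view of the cluster (assumed invocation)
        qstat_output = subprocess.check_output(['qstat', '-f', '-u', '*']).decode()
        # assumed: parseQstatOutput returns the JobsState it builds
        jobs_state = qstat_parser.parseQstatOutput(qstat_output)
        rows = []
        for machine_name in jobs_state.getMachineNames():  # assumed accessor
            # theoretical double precision flops, read from SimpaDb.sql through Inventory
            gflops = inventory.get_computer_dflops(machine_name) / 1.e9
            rows.append((machine_name, inventory.get_num_cpus(machine_name),
                         inventory.get_cpu_model(machine_name), gflops))
        return rows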
@@ -50,6 +50,15 @@ class QstatParser:
assert False, 'unhandled queue machine state flag :"' + c + '"'
return queueMachineState

def parseQstatOutput( self, qstatOutput ):

# ugly hack to work around the fact that qstat truncates the fqdn of cluster nodes
# graffy@physix-master:~$ qstat -f -u \*
# queuename qtype resv/used/tot. load_avg arch states
# ---------------------------------------------------------------------------------
# main.q@physix88.ipr.univ-renne BIP 0/0/36 14.03 lx-amd64
# TODO: fix this properly by parsing the output of 'qstat -f -u \* -xml' instead of 'qstat -f -u \*'
qstatOutput = re.sub('\.univ[^ ]*', '.univ-rennes1.fr', qstatOutput)

jobsState = JobsState()
f = StringIO.StringIO(qstatOutput)
line = f.readline()
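For illustration, a standalone check of the domain rewriting workaround above, using the truncated machine line quoted in the comment:

    import re

    truncated_line = 'main.q@physix88.ipr.univ-renne BIP 0/0/36 14.03 lx-amd64'
    fixed_line = re.sub(r'\.univ[^ ]*', '.univ-rennes1.fr', truncated_line)
    # fixed_line == 'main.q@physix88.ipr.univ-rennes1.fr BIP 0/0/36 14.03 lx-amd64'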
@@ -66,7 +75,7 @@ class QstatParser:
jobRegularExp = re.compile( '^[ ]*(?P<jobId>[^ ]+)[ ]+[0-9.]+[ ]+(?P<jobScriptName>[^ ]+)[ ]+(?P<jobOwner>[^ ]+)[ ]+(?P<jobStatus>[^ ]+)[ ]+(?P<jobStartOrSubmitTime>[0-9][0-9]/[0-9][0-9]/[0-9][0-9][0-9][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9])[ ]+(?P<numSlots>[0-9]+)[ ]+(?P<jobArrayDetails>[^\n]*)[\s]*$' )
# example of machine line :
# allintel.q@simpatix34.univ-ren BIP 0/6/8 6.00 darwin-x86
machineRegularExp = re.compile( '^(?P<queueName>[^@]+)@(?P<machineName>[^.]+)[^ ]+[ ]+(?P<queueTypeString>[^ ]+)[ ]+(?P<numReservedSlots>[^/]+)/(?P<numUsedSlots>[^/]+)/(?P<numTotalSlots>[^ ]+)[ ]+(?P<cpuLoad>[^ ]+)[\s]+(?P<archName>[^ ]+)[\s]+(?P<queueMachineStatus>[^\s]*)' )
machineRegularExp = re.compile( '^(?P<queueName>[^@]+)@(?P<machineName>[^ ]+)[ ]+(?P<queueTypeString>[^ ]+)[ ]+(?P<numReservedSlots>[^/]+)/(?P<numUsedSlots>[^/]+)/(?P<numTotalSlots>[^ ]+)[ ]+(?P<cpuLoad>[^ ]+)[\s]+(?P<archName>[^ ]+)[\s]+(?P<queueMachineStatus>[^\s]*)' )
pendingJobsHeaderRegularExp = re.compile( '^ - PENDING JOBS - PENDING JOBS - PENDING JOBS - PENDING JOBS - PENDING JOBS[?]*' )
while( len(line) > 0 ):
# print line
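As a quick check of the relaxed machineName group ([^ ]+ instead of [^.]+), the new regular expression matched against the example machine line quoted above (in the real parser the fqdn workaround has already expanded the domain at this point):

    import re

    machineRegularExp = re.compile( '^(?P<queueName>[^@]+)@(?P<machineName>[^ ]+)[ ]+(?P<queueTypeString>[^ ]+)[ ]+(?P<numReservedSlots>[^/]+)/(?P<numUsedSlots>[^/]+)/(?P<numTotalSlots>[^ ]+)[ ]+(?P<cpuLoad>[^ ]+)[\s]+(?P<archName>[^ ]+)[\s]+(?P<queueMachineStatus>[^\s]*)' )
    # a machine line as returned by readline(), hence the trailing newline
    line = 'allintel.q@simpatix34.univ-ren BIP 0/6/8 6.00 darwin-x86\n'
    match = machineRegularExp.match(line)
    print(match.group('machineName'), match.group('numUsedSlots'), match.group('cpuLoad'))
    # -> simpatix34.univ-ren 6 6.00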
@@ -70,7 +70,7 @@ class RemoteMysqlDb(ISqlDatabaseBackend):


class SqlFile(ISqlDatabaseBackend):
def __init__(self, sql_file_path):
def __init__(self, sql_file_path, truncate_hex_strings = False):
"""
:param str sql_file_path: the path of the sql file containing the inventory database
"""

@@ -90,7 +90,12 @@ class SqlFile(ISqlDatabaseBackend):
#print(sql)
self._cur = self._con.cursor()
#print(mysql_to_sqlite(sql))
self._cur.executescript(mysql_to_sqlite(sql))
sqlite_sql = mysql_to_sqlite(sql, truncate_hex_strings)
#with open('/tmp/toto.sqlite.sql', 'w') as f:
# f.write(sqlite_sql)
#with open('/tmp/toto.sqlite.sql', 'r') as f:
# sqlite_sql = f.read()
self._cur.executescript(sqlite_sql)

def query(self, sql_query):
"""
inventory.py
@@ -81,6 +81,7 @@ class Inventory( object ):
ordering_date = datetime.datetime.strptime(ordering_date_as_str, '%d/%m/%Y')
return ordering_date


def get_cpu_dflops(self, cpu_model):
'''
returns the number of double precision operations per second this cpu can achieve

@@ -88,11 +89,21 @@
#INSERT INTO `cpu_specs` (`cpu_model`, `num_cores`, `clock_speed`, `dflops_per_core_per_cycle`, `comment`) VALUES
#('intel_xeon_x5550', 4, 2.67, 4, ''),
num_cores = int(self._sql_reader.get_table_attr( 'cpu_specs', 'cpu_model', cpu_model, 'num_cores' ))
clock_speed = float(self._sql_reader.get_table_attr( 'cpu_specs', 'cpu_model', cpu_model, 'clock_speed' ))
clock_speed = float(self._sql_reader.get_table_attr( 'cpu_specs', 'cpu_model', cpu_model, 'clock_speed' )) * 1.e9
dflops_per_core_per_cycle = int(self._sql_reader.get_table_attr('cpu_specs', 'cpu_model', cpu_model, 'dflops_per_core_per_cycle' ))
# print(num_cores, clock_speed, dflops_per_core_per_cycle)
return clock_speed * dflops_per_core_per_cycle * num_cores

def get_num_cpus(self, computer_name):
return int(self._sql_reader.get_table_attr( 'computer_to_cpu', 'computer_id', computer_name, 'num_cpu' ))

def get_cpu_model(self, computer_name):
return self._sql_reader.get_table_attr( 'computer_to_cpu', 'computer_id', computer_name, 'cpu_model' )

def get_cpu_frequency(self, computer_name):
cpu_model = self._sql_reader.get_table_attr( 'computer_to_cpu', 'computer_id', computer_name, 'cpu_model' )
return float(self._sql_reader.get_table_attr( 'cpu_specs', 'cpu_model', cpu_model, 'clock_speed' ))

def get_computer_dflops(self, computer_name):
# print(computer_serial_number)
num_cpus = int(self._sql_reader.get_table_attr( 'computer_to_cpu', 'computer_id', computer_name, 'num_cpu' ))
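As a sanity check of the formula in get_cpu_dflops above, using the intel_xeon_x5550 row quoted in the comment (4 cores, 2.67 GHz, 4 double precision operations per core per cycle):

    num_cores = 4
    clock_speed = 2.67 * 1.e9  # Hz, after the GHz to Hz conversion added in this hunk
    dflops_per_core_per_cycle = 4
    print(clock_speed * dflops_per_core_per_cycle * num_cores)  # 42.72e9, i.e. about 42.7 gflops per cpu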
@@ -1,6 +1,6 @@
import re

def mysql_to_sqlite( mysql_sql_code ):
def mysql_to_sqlite( mysql_sql_code, truncate_hex_strings = False ):
"""
converts mysql-compatible sql code into sqlite-compatible sql code
@@ -9,27 +9,64 @@ def mysql_to_sqlite( mysql_sql_code ):
content = mysql_sql_code

# unused commands
COMMAND_RE = re.compile(r'^(SET).*?;\n$', re.I | re.M | re.S)
COMMAND_RE = re.compile(r'^(SET).*?;\n$', re.IGNORECASE | re.MULTILINE | re.DOTALL)
content = COMMAND_RE.sub('', content)

# sqlite doesn't like COMMENT= , remove it properly before the table constraint filter because the table constraint filter is not clever enough to cope with ; inside comment strings
# ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='A list of URL aliases for Drupal paths; a user may visit...';
COMMENTS_EQUAL_RE = re.compile(r'\s+COMMENT=\'[^\']*\'', re.IGNORECASE | re.MULTILINE | re.DOTALL)
# content = re.sub(r'^-- Tab[.]', 'toto', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL)
content = COMMENTS_EQUAL_RE.sub('', content)

# table constraints
TCONS_RE = re.compile(r'\)(\s*(CHARSET|DEFAULT|ENGINE)(=.*?)?\s*)+;', re.I | re.M | re.S)
TCONS_RE = re.compile(r'\)(\s*(CHARSET|DEFAULT|ENGINE)(=.*?)?\s*)+;', re.IGNORECASE | re.MULTILINE | re.DOTALL)
content = TCONS_RE.sub(');', content)

# remove comments
# content = re.sub(r'^-- Tab[.]', 'toto', content, flags=re.I | re.M | re.S)
# `nid` int(10) UNSIGNED NOT NULL DEFAULT '0' COMMENT 'The node.nid this record affects.',
COMMENTS_RE = re.compile(r'\s+COMMENT\s+\'[^\']*\'', re.IGNORECASE | re.MULTILINE | re.DOTALL)
# content = re.sub(r'^-- Tab[.]', 'toto', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL)
content = COMMENTS_RE.sub('', content)

# sqlite doesn't like ' being escaped as \', use '' instead
content = re.sub(r'\\\'', '\'\'', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL )

if truncate_hex_strings:
# sqlite doesn't like too big hex strings 0x613a343a7b733a383a
content = re.sub(r'0x[0-9a-f]+', '0xdeadbeef', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL )

# sqlite doesn't understand
# `format` varchar(100) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL
content = re.sub(r'\s+CHARACTER SET\s+[^\s]+', '', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL )

# sqlite doesn't know the utf8_bin :
# `format` varchar(100) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL
#no such collation sequence: utf8_bin
content = re.sub(r'\s+COLLATE\s+utf8_bin\s+', ' ', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL )

# sqlite doesn't like 'unsigned' as in `ip_address_3` tinyint(3) unsigned NOT NULL default '27',
content = re.sub(r' unsigned ', ' ', content)
content = re.sub(r' unsigned ', ' ', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL )

# sqlite doesn't like 'enum' as in `type` enum('normal','light_out_management') NOT NULL default 'normal',,
content = re.sub(r' enum\([^\)]*\) ', ' varchar(255) ', content)

# sqlite doesn't support much of alter table (https://www.sqlite.org/lang_altertable.html). The following is not supported :
# ALTER TABLE `blocked_ips`
# ADD PRIMARY KEY (`iid`),
# ADD KEY `blocked_ip` (`ip`);
content = re.sub(r'alter table [^;]*;', '', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL )


# COMMIT;
# sqlite3.OperationalError: cannot commit - no transaction is active
content = re.sub(r'commit\s*;', '', content, flags=re.IGNORECASE | re.MULTILINE | re.DOTALL )

# insert multiple values
# INSERTVALS_RE = re.compile(r'^(INSERT INTO.*?VALUES)\s*\((.*)\*;', re.I | re.M | re.S)
INSERTVALS_RE = re.compile(r'^(INSERT INTO.*?VALUES)\s*([^;]*);', re.I | re.M | re.S)
#INSERTVALS_RE = re.compile(r'^(INSERT INTO.*?VALUES)\s*((\[^\)](\)));$', re.I | re.M | re.S)
INSERTVALS_SPLIT_RE = re.compile(r'\)\s*,\s*\(', re.I | re.M | re.S)
# INSERTVALS_RE = re.compile(r'^(INSERT INTO.*?VALUES)\s*\((.*)\*;', re.IGNORECASE | re.MULTILINE | re.DOTALL)
INSERTVALS_RE = re.compile(r'^(INSERT INTO.*?VALUES)\s*([^;]*);', re.IGNORECASE | re.MULTILINE | re.DOTALL)
#INSERTVALS_RE = re.compile(r'^(INSERT INTO.*?VALUES)\s*((\[^\)](\)));$', re.IGNORECASE | re.MULTILINE | re.DOTALL)
INSERTVALS_SPLIT_RE = re.compile(r'\)\s*,\s*\(', re.IGNORECASE | re.MULTILINE | re.DOTALL)


def insertvals_replacer(match):
insert, values = match.groups()
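A minimal usage sketch of the converter, mirroring what SqlFile does with the inventory dump; the CREATE TABLE statement is an invented example and the import of mysql_to_sqlite depends on the project layout:

    import sqlite3
    # mysql_to_sqlite is the converter shown in this diff; import it from the module that defines it

    mysql_sql = "CREATE TABLE `cpu_specs` (`cpu_model` varchar(255) NOT NULL, `num_cores` int(10) unsigned NOT NULL DEFAULT '0' COMMENT 'number of cores') ENGINE=InnoDB DEFAULT CHARSET=utf8;"
    con = sqlite3.connect(':memory:')
    cur = con.cursor()
    # the mysql-only constructs (ENGINE=..., COMMENT, unsigned) are stripped so that sqlite accepts the script
    cur.executescript(mysql_to_sqlite(mysql_sql, truncate_hex_strings=True))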