-# Copyright (c) 2009, 2010, 2011, 2012 Nicira Networks
+# Copyright (c) 2009, 2010, 2011, 2012, 2013 Nicira, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
__pychecker__ = 'no-classattr no-objattrs'
+ROW_CREATE = "create"
+ROW_UPDATE = "update"
+ROW_DELETE = "delete"
-class Idl:
+
+class Idl(object):
"""Open vSwitch Database Interface Definition Language (OVSDB IDL).
The OVSDB IDL maintains an in-memory replica of a database. It issues RPC
self.lock_name = lock_name
self.__send_lock_request()
+ def notify(self, event, row, updates=None):
+ """Hook for implementing create/update/delete notifications
+
+ :param event: The event that was triggered
+ :type event: ROW_CREATE, ROW_UPDATE, or ROW_DELETE
+    :param row: The row as it is after the operation has occurred
+ :type row: Row
+ :param updates: For updates, a Row object with just the changed columns
+ :type updates: Row
+ """
+
def __clear(self):
changed = False
if row:
del table.rows[uuid]
changed = True
+ self.notify(ROW_DELETE, row)
else:
# XXX rate-limit
vlog.warn("cannot delete missing row %s from table %s"
% (uuid, table.name))
if self.__row_update(table, row, new):
changed = True
+ self.notify(ROW_CREATE, row)
else:
+ op = ROW_UPDATE
if not row:
row = self.__create_row(table, uuid)
changed = True
+ op = ROW_CREATE
# XXX rate-limit
vlog.warn("cannot modify missing row %s in table %s"
% (uuid, table.name))
if self.__row_update(table, row, new):
changed = True
+ self.notify(op, row, Row.from_json(self, table, uuid, old))
return changed
def __row_update(self, table, row, row_json):
datum = self._changes.get(column_name)
if datum is None:
+ if self._data is None:
+ raise AttributeError("%s instance has no attribute '%s'" %
+ (self.__class__.__name__, column_name))
datum = self._data[column_name]
return datum.to_python(_uuid_to_row)
return
self._idl.txn._write(self, column, datum)
+ @classmethod
+ def from_json(cls, idl, table, uuid, row_json):
+ data = {}
+ for column_name, datum_json in row_json.iteritems():
+ column = table.columns.get(column_name)
+ if not column:
+ # XXX rate-limit
+ vlog.warn("unknown column %s in table %s"
+ % (column_name, table.name))
+ continue
+ try:
+ datum = ovs.db.data.Datum.from_json(column.type, datum_json)
+ except error.Error, e:
+ # XXX rate-limit
+ vlog.warn("error parsing column %s in table %s: %s"
+ % (column_name, table.name, e))
+ continue
+ data[column_name] = datum
+ return cls(idl, table, uuid, data)
+
def verify(self, column_name):
"""Causes the original contents of column 'column_name' in this row to
be verified as a prerequisite to completing the transaction. That is,
assert self._changes is not None
if self._data is None:
del self._idl.txn._txn_rows[self.uuid]
+ else:
+ self._idl.txn._txn_rows[self.uuid] = self
self.__dict__["_changes"] = None
del self._table.rows[self.uuid]
self._status = Transaction.UNCOMMITTED
self._error = None
self._comments = []
- self._commit_seqno = self.idl.change_seqno
self._inc_row = None
self._inc_column = None
row = self._txn_rows.get(uuid, None)
if row and row._data is None:
return ["named-uuid", _uuid_name_from_uuid(uuid)]
+ else:
+ return [self._substitute_uuids(elem) for elem in json]
return json
def __disassemble(self):
The location on disk of the schema used may be found in the
'schema_location' variable."""
- def __init__(self, location=None):
- """Creates a new Schema object."""
+ def __init__(self, location=None, schema_json=None):
+ """Creates a new Schema object.
- if location is None:
- location = "%s/vswitch.ovsschema" % ovs.dirs.PKGDATADIR
+    'location' file path to the ovs schema. None means the default location
+    'schema_json' schema in json representation, in memory
+ """
+
+ if location and schema_json:
+ raise ValueError("both location and schema_json can't be "
+ "specified. it's ambiguous.")
+ if schema_json is None:
+ if location is None:
+ location = "%s/vswitch.ovsschema" % ovs.dirs.PKGDATADIR
+ schema_json = ovs.json.from_file(location)
- self.schema_location = location
+ self.schema_json = schema_json
self._tables = {}
self._all = False
columns = set(columns) | self._tables.get(table, set())
self._tables[table] = columns
+ def register_table(self, table):
+    """Registers interest in all columns of the given 'table'. Future calls
+ to get_idl_schema() will include all columns of 'table'.
+
+ 'table' must be a string
+ """
+ assert type(table) is str
+ self._tables[table] = set() # empty set means all columns in the table
+
def register_all(self):
"""Registers interest in every column of every table."""
self._all = True
object based on columns registered using the register_columns()
function."""
- schema = ovs.db.schema.DbSchema.from_json(
- ovs.json.from_file(self.schema_location))
+ schema = ovs.db.schema.DbSchema.from_json(self.schema_json)
+ self.schema_json = None
if not self._all:
schema_tables = {}
assert table_name in schema.tables
table = schema.tables[table_name]
+ if not columns:
+ # empty set means all columns in the table
+ return table
+
new_columns = {}
for column_name in columns:
assert type(column_name) is str