Page MenuHome
No OneTemporary

from __future__ import division
import os
from bson.objectid import ObjectId
from eve.methods.put import put_internal
from eve.methods.post import post_internal
from flask.ext.script import Manager
from application import app
from manage.node_types.act import node_type_act
from manage.node_types.asset import node_type_asset
from manage.node_types.blog import node_type_blog
from manage.node_types.comment import node_type_comment
from manage.node_types.group import node_type_group
from manage.node_types.group_texture import node_type_group_texture
from manage.node_types.post import node_type_post
from manage.node_types.project import node_type_project
from manage.node_types.scene import node_type_scene
from manage.node_types.shot import node_type_shot
from manage.node_types.storage import node_type_storage
from manage.node_types.task import node_type_task
from manage.node_types.texture import node_type_texture
# Flask-Script manager: exposes the functions in this file as CLI commands.
manager = Manager(app)
# Host for the direct pymongo connections opened by the commands below;
# overridable through the MONGO_HOST environment variable.
MONGO_HOST = os.environ.get('MONGO_HOST', 'localhost')
def runserver():
# Run the Flask development server with config.Development settings,
# falling back to hard-coded defaults when no config module exists.
# NOTE(review): this paste is truncated -- the @manager.command decorator,
# a "try:" before "import config" (the "except ImportError" below has no
# matching try), the argument of the .format() call, the os.makedirs()
# creating STORAGE_DIR, and the final app.run(...) are all missing.
import config
PORT = config.Development.PORT
HOST = config.Development.HOST
DEBUG = config.Development.DEBUG
app.config['STORAGE_DIR'] = config.Development.STORAGE_DIR
except ImportError:
# Default settings
PORT = 5000
HOST = ''
DEBUG = True
app.config['STORAGE_DIR'] = '{0}/application/static/storage'.format(
# Automatic creation of STORAGE_DIR path if it's missing
if not os.path.exists(app.config['STORAGE_DIR']):
def post_item(entry, data):
    """POST a document through Eve's internal machinery.

    Thin wrapper kept so callers in this file share one entry point.

    :param entry: name of the Eve resource/collection to post to.
    :param data: payload dict for the new document.
    :returns: the response tuple produced by eve's post_internal.
    """
    return post_internal(entry, data)
def put_item(collection, item):
    """PUT a full document back into its collection via Eve's put_internal.

    Eve's internal fields would fail schema validation on a PUT, so they are
    stripped from the payload first; the original ``_id`` is still passed as
    the lookup. Prints diagnostics when Eve rejects the document.

    :param collection: name of the Eve resource/collection.
    :param item: complete document, including its ``_id`` (mutated in place:
        internal fields are popped).
    :returns: the response tuple from put_internal (also on error, so callers
        can inspect ``_issues``).
    """
    item_id = item['_id']
    # Remove internal fields that would cause a validation error on PUT.
    internal_fields = ['_id', '_etag', '_updated', '_created']
    for field in internal_fields:
        item.pop(field, None)
    p = put_internal(collection, item, **{'_id': item_id})
    if p[0]['_status'] == 'ERR':
        print(p)
        print(item)
    return p
def clear_db():
"""Wipes the database."""
from pymongo import MongoClient
client = MongoClient(MONGO_HOST, 27017)
db = client.eve
# NOTE(review): truncated paste -- the statements that actually drop the
# 'eve' database/collections are missing after this point.
def upgrade_node_types():
"""Wipes node_types collection and populates it again"""
# NOTE(review): "node_types_collection =['node_types']" looks truncated in
# this paste; presumably app.data.driver.db['node_types'] -- confirm
# against the original file.
node_types_collection =['node_types']
node_types = node_types_collection.find({})
# Remember the old _id per node-type name so the repopulation can keep
# existing ids stable.
old_ids = {}
for node_type in node_types:
old_ids[node_type['name']] = node_type['_id']
# NOTE(review): the closing call (presumably populate_node_types(old_ids))
# is missing from this paste.
def get_id(collection, name):
    """Return the ``_id`` of the first document in *collection* named *name*.

    Opens a direct pymongo connection to the 'eve' database on MONGO_HOST.

    :param collection: mongo collection name.
    :param name: value of the document's 'name' field to look up.
    :returns: the ObjectId of the first matching document.
    :raises IndexError: when no document matches.
    """
    from pymongo import MongoClient
    client = MongoClient(MONGO_HOST, 27017)
    db = client.eve
    cursor = db[collection].find({'name': name})
    # Index the cursor once instead of twice (each [0] re-queries the cursor).
    node_id = cursor[0]['_id']
    print(node_id)
    return node_id
def manage_groups():
"""Take user email and group name,
and add or remove the user from that group."""
from pymongo import MongoClient
client = MongoClient(MONGO_HOST, 27017)
db = client.eve
print ("")
print ("Add or Remove user from group")
print ("leave empty to cancel")
print ("")
# Select Action
print ("Do you want to Add or Remove the user from the group?")
retry = True
while retry:
action = raw_input('add/remove: ')
if action == '':
# NOTE(review): truncated paste -- the early return that cancels on
# empty input is missing here.
elif action.lower() in ['add', 'a', 'insert']:
# BUG(review): '==' is a comparison, not an assignment, so 'action'
# keeps whatever the user typed (e.g. 'a') and the add branch below
# may never match.  Should read: action = 'add'
action == 'add'
retry = False
elif action.lower() in ['remove', 'r', 'rmv', 'rem', 'delete', 'del']:
action = 'remove'
retry = False
# NOTE(review): an 'else:' presumably preceded this retry message.
print ("Incorrect action, press type 'add' or 'remove'")
# Select User
retry = True
while retry:
user_email = raw_input('User email: ')
if user_email == '':
# NOTE(review): truncated -- the cancel-on-empty return is missing.
user = db.users.find_one({'email': user_email})
if user:
retry = False
# NOTE(review): an 'else:' presumably preceded this retry message.
print ("Incorrect user email, try again, or leave empty to cancel")
# Select group
retry = True
while retry:
group_name = raw_input('Group name: ')
if group_name == '':
# NOTE(review): truncated -- the cancel-on-empty return is missing.
group = db.groups.find_one({'name': group_name})
if group:
retry = False
# NOTE(review): an 'else:' presumably preceded this retry message.
print ("Incorrect group name, try again, or leave empty to cancel")
# Do
current_groups = user.get('groups', [])
if action == 'add':
if group['_id'] in current_groups:
print "User {0} is already in group {1}".format(
user_email, group_name)
# NOTE(review): truncated -- an else branch appending group['_id'] to
# current_groups presumably preceded this write-back.
db.users.update({'_id': user['_id']},
{"$set": {'groups': current_groups}})
print "User {0} added to group {1}".format(user_email, group_name)
elif action == 'remove':
if group['_id'] not in current_groups:
print "User {0} is not in group {1}".format(user_email, group_name)
# NOTE(review): truncated -- an else branch removing group['_id'] from
# current_groups presumably preceded this write-back.
db.users.update({'_id': user['_id']},
{"$set": {'groups': current_groups}})
print "User {0} removed from group {1}".format(
user_email, group_name)
# BUG(review): mutable default argument -- the same dict is shared across
# calls; prefer old_ids=None plus a None check inside.
def populate_node_types(old_ids={}):
# Re-create every node_type document from the definitions imported at the
# top of this file, keeping the old _id when one is known.
# NOTE(review): "node_types_collection =['node_types']" looks truncated;
# presumably app.data.driver.db['node_types'].
node_types_collection =['node_types']
def mix_node_type(old_id, node_type_dict):
# Merge Eve-internal attributes and 'permissions' from the stored
# node_type document into the freshly defined node_type dict.
# Take eve parameters
node_type = node_types_collection.find_one({'_id': old_id})
for attr in node_type:
if attr[0] == '_':
# Mix with node eve attributes. This is really not needed since
# the attributes are stripped before doing a put_internal.
node_type_dict[attr] = node_type[attr]
elif attr == 'permissions':
node_type_dict['permissions'] = node_type['permissions']
return node_type_dict
def upgrade(node_type, old_ids):
# PUT the node_type over its old _id when one exists; otherwise POST a
# brand-new document.
print("Node {0}".format(node_type['name']))
node_name = node_type['name']
if node_name in old_ids:
node_id = old_ids[node_name]
node_type = mix_node_type(node_id, node_type)
# Removed internal fields that would cause validation error
internal_fields = ['_id', '_etag', '_updated', '_created']
for field in internal_fields:
node_type.pop(field, None)
p = put_internal('node_types', node_type, **{'_id': node_id})
# NOTE(review): an 'else:' presumably preceded this POST path in the
# original -- as pasted, both branches would run.
print("Making the node")
post_item('node_types', node_type)
# upgrade(shot_node_type, old_ids)
# upgrade(task_node_type, old_ids)
# upgrade(scene_node_type, old_ids)
# upgrade(act_node_type, old_ids)
upgrade(node_type_project, old_ids)
upgrade(node_type_group, old_ids)
upgrade(node_type_asset, old_ids)
upgrade(node_type_storage, old_ids)
upgrade(node_type_comment, old_ids)
upgrade(node_type_blog, old_ids)
upgrade(node_type_post, old_ids)
upgrade(node_type_texture, old_ids)
upgrade(node_type_group_texture, old_ids)
def add_parent_to_nodes():
"""Find the parent of any node in the nodes collection"""
import codecs
import sys
# Force utf-8 stdout so node names with non-ascii characters print cleanly
# (Python 2).
UTF8Writer = codecs.getwriter('utf8')
sys.stdout = UTF8Writer(sys.stdout)
# NOTE(review): truncated; presumably app.data.driver.db['nodes'].
nodes_collection =['nodes']
def find_parent_project(node):
# Walk up the 'parent' chain until the topmost ancestor (the project
# node) is reached; returns None when the chain dead-ends.
if node and 'parent' in node:
parent = nodes_collection.find_one({'_id': node['parent']})
return find_parent_project(parent)
if node:
return node
return None
nodes = nodes_collection.find()
nodes_index = 0
nodes_orphan = 0
for node in nodes:
nodes_index += 1
# Hard-coded ObjectId of the 'project' node_type document -- project
# nodes themselves get no 'project' field.
if node['node_type'] == ObjectId("55a615cfea893bd7d0489f2d"):
print u"Skipping project node - {0}".format(node['name'])
# NOTE(review): a 'continue' is presumably missing here.
project = find_parent_project(node)
if project:
nodes_collection.update({'_id': node['_id']},
{"$set": {'project': project['_id']}})
print u"{0} {1}".format(node['_id'], node['name'])
# NOTE(review): an 'else:' presumably preceded the orphan handling below
# -- nodes with no reachable project are counted and removed.
nodes_orphan += 1
nodes_collection.remove({'_id': node['_id']})
print "Removed {0} {1}".format(node['_id'], node['name'])
print "Edited {0} nodes".format(nodes_index)
print "Orphan {0} nodes".format(nodes_orphan)
def embed_children_in_files():
"""Embed children file objects in to their parent"""
# NOTE(review): truncated; presumably app.data.driver.db['files'].
files_collection =['files']
for f in files_collection.find():
# Give some feedback
print "processing {0}".format(f['_id'])
# Proceed only if the node is a child
file_id = f['_id']
if 'parent' in f:
# Get the parent node
parent = files_collection.find_one({'_id': f['parent']})
if not parent:
print "No parent found for {0}".format(file_id)
files_collection.remove({'_id': file_id})
# NOTE(review): a 'continue' is presumably missing here.
parent_id = parent['_id']
# Prepare to loop through the properties required for a variation
properties = ['content_type', 'duration', 'size', 'format', 'width',
'height', 'length', 'md5', 'file_path']
variation = {}
# Build dict with variation properties
for p in properties:
if p in f:
variation[p] = f[p]
# the variation was generated
if variation:
# If the parent file does not have a variation property
if 'variations' not in parent:
parent['variations'] = []
# Append the variation to the variations
# NOTE(review): the actual parent['variations'].append(variation)
# statement is missing from this paste.
# Removed internal fields that would cause validation error
internal_fields = ['_id', '_etag', '_updated', '_created']
for field in internal_fields:
parent.pop(field, None)
p = put_internal('files', parent, **{'_id': parent_id})
if p[0]['_status'] == 'ERR':
print p[0]['_issues']
print "PARENT: {0}".format(parent)
print "VARIATION: {0}".format(variation)
def remove_children_files():
"""Remove any file object with a parent field"""
# NOTE(review): truncated; presumably app.data.driver.db['files'].
files_collection =['files']
for f in files_collection.find():
# A 'parent' field marks the file as a child (variation) of another file.
if 'parent' in f:
file_id = f['_id']
# Delete child object
files_collection.remove({'_id': file_id})
print "deleted {0}".format(file_id)
def make_project_public(project_id):
"""Convert every node of a project from pending to public"""
# Safety switch: when True nothing is written back to the database.
DRY_RUN = False
# NOTE(review): truncated; presumably app.data.driver.db['nodes'].
nodes_collection =['nodes']
for n in nodes_collection.find({'project': ObjectId(project_id)}):
n['properties']['status'] = 'published'
print "Publishing {0} {1}".format(n['_id'], n['name'])
if not DRY_RUN:
put_item('nodes', n)
def convert_assets_to_textures(project_id):
"""Get any node of type asset in a certain project and convert it to a
node_type texture."""
# Safety switch: when True nothing is created, updated or removed.
DRY_RUN = False
# NOTE(review): the three assignments below look truncated; presumably
# app.data.driver.db['node_types'] / ['files'] / ['nodes'].
node_types_collection =['node_types']
files_collection =['files']
nodes_collection =['nodes']
def parse_name(name):
"""Parse a texture name to infer properties"""
# Defaults: colour map, not tileable.
variation = 'col'
is_tileable = False
variations = ['_bump', '_spec', '_nor', '_col', '_translucency']
for v in variations:
if v in name:
# Strip the leading underscore to get the variation token.
variation = v[1:]
if '_tileable' in name:
is_tileable = True
return dict(variation=variation, is_tileable=is_tileable)
def make_texture_node(base_node, files, parent_id=None):
# Build one 'texture' node out of a list of asset nodes (one per map
# variation), using base_node for the shared properties.
texture_node_type = node_types_collection.find_one({'name':'texture'})
files_list = []
is_tileable = False
if parent_id is None:
parent_id = base_node['parent']
# NOTE(review): an 'else:' presumably preceded this print -- as pasted
# it would print even when no parent was provided.
print "Using provided parent {0}".format(parent_id)
# Create a list with all the file fariations for the texture
for f in files:
print "Processing {1} {0}".format(f['name'], f['_id'])
attributes = parse_name(f['name'])
if attributes['is_tileable']:
is_tileable = True
# NOTE(review): truncated -- the dict(...) literal below is unclosed and
# the append of file_entry to files_list is missing from this paste.
file_entry = dict(
# Get the first file from the files list and use it as base for some
# node properties
first_file = files_collection.find_one({'_id': files[0]['properties']['file']})
if 'picture' in base_node and base_node['picture'] != None:
picture = base_node['picture']
# NOTE(review): an 'else:' presumably preceded this fallback assignment.
picture = first_file['_id']
if 'height' in first_file:
# NOTE(review): truncated -- this dict(...) literal is broken; the
# stray "(width / height), 2)" line below looks like the tail of a
# round(..., 2) aspect-ratio property.
node = dict(
resolution="{0}x{1}".format(first_file['height'], first_file['width']),
is_landscape=(first_file['height'] < first_file['width']),
(first_file['width'] / first_file['height']), 2)
print "Making {0}".format(node['name'])
if not DRY_RUN:
p = post_internal('nodes', node)
if p[0]['_status'] == 'ERR':
# NOTE(review): truncated -- presumably pprint of p[0]['_issues'].
import pprint
# Pass 1: groups whose name starts with '_' hold the per-variation assets
# of one texture; merge them into a single texture node.
nodes_collection =['nodes']
for n in nodes_collection.find({'project': ObjectId(project_id)}):
n_type = node_types_collection.find_one({'_id': n['node_type']})
# NOTE(review): resetting processed_nodes inside the loop looks wrong for
# the "Delete processed nodes" pass below -- likely a paste artifact.
processed_nodes = []
if n_type['name'] == 'group' and n['name'].startswith('_'):
print "Processing {0}".format(n['name'])
# Get the content of the group
children = [c for c in nodes_collection.find({'parent': n['_id']})]
make_texture_node(children[0], children, parent_id=n['parent'])
processed_nodes += children
elif n_type['name'] == 'group':
# Change group type to texture group
node_type_texture = node_types_collection.find_one({'name':'group_texture'})
n['node_type'] = node_type_texture['_id']
n['properties'].pop('notes', None)
print "Updating {0}".format(n['name'])
if not DRY_RUN:
put_item('nodes', n)
# Delete processed nodes
for node in processed_nodes:
print "Removing {0} {1}".format(node['_id'], node['name'])
if not DRY_RUN:
nodes_collection.remove({'_id': node['_id']})
# Make texture out of single image
for n in nodes_collection.find({'project': ObjectId(project_id)}):
n_type = node_types_collection.find_one({'_id': n['node_type']})
if n_type['name'] == 'asset':
make_texture_node(n, [n])
# Delete processed nodes
print "Removing {0} {1}".format(n['_id'], n['name'])
if not DRY_RUN:
nodes_collection.remove({'_id': n['_id']})
def set_attachment_names():
"""Loop through all existing nodes and assign proper ContentDisposition
metadata to referenced files that are using GCS."""
from application import update_file_name
# NOTE(review): truncated; presumably app.data.driver.db['nodes'].
nodes_collection =['nodes']
for n in nodes_collection.find():
print "Updating node {0}".format(n['_id'])
# NOTE(review): the call to update_file_name(n) is missing from this
# paste.
def files_verify_project():
"""Verify for missing or conflicting node/file ids"""
# NOTE(review): both assignments look truncated; presumably
# app.data.driver.db['nodes'] / ['files'].
nodes_collection =['nodes']
files_collection =['files']
# Per-category lists of human-readable issue strings, printed at the end.
issues = dict(missing=[], conflicting=[], processing=[])
def _parse_file(item, file_id):
# Classify one node->file reference into the issues dict.
f = files_collection.find_one({'_id': file_id})
if f:
if 'project' in item and 'project' in f:
if item['project'] != f['project']:
# NOTE(review): the append to issues['conflicting'] is missing
# from this paste.
if 'status' in item['properties'] \
and item['properties']['status'] == 'processing':
# NOTE(review): truncated -- the orphan string below was presumably
# the argument of issues['missing'].append(...) in an else branch for
# file ids that do not resolve at all; the processing branch's append
# is missing too.
"{0} missing {1}".format(item['_id'], file_id))
for item in nodes_collection.find():
print "Verifying node {0}".format(item['_id'])
if 'file' in item['properties']:
_parse_file(item, item['properties']['file'])
elif 'files' in item['properties']:
for f in item['properties']['files']:
_parse_file(item, f['file'])
print "==="
print "Issues detected:"
for k, v in issues.iteritems():
print "{0}:".format(k)
for i in v:
print i
print "==="
def replace_node_type(project, node_type_name, new_node_type):
    """Update or create the specified node type.

    Relies on node_types having a unique name within a project: when an
    embedded node type called *node_type_name* exists in
    ``project['node_types']`` it is replaced in place by *new_node_type*;
    otherwise *new_node_type* is appended.

    :param project: project document (mutated in place).
    :param node_type_name: name of the node type to replace.
    :param new_node_type: replacement node type dict.
    :returns: None.
    """
    old_node_type = next(
        (item for item in project['node_types'] if item.get('name') \
            and item['name'] == node_type_name), None)
    if old_node_type:
        for i, v in enumerate(project['node_types']):
            if v['name'] == node_type_name:
                project['node_types'][i] = new_node_type
    else:
        # NOTE(review): the paste is truncated here; the docstring promises
        # "update or create", so an unknown node type is appended.
        project['node_types'].append(new_node_type)
def project_upgrade_node_types(project_id):
# Replace every known embedded node type of one project with the current
# definition from manage.node_types.*, then PUT the project back.
# NOTE(review): truncated; presumably app.data.driver.db['projects'].
projects_collection =['projects']
project = projects_collection.find_one({'_id': ObjectId(project_id)})
replace_node_type(project, 'group', node_type_group)
replace_node_type(project, 'asset', node_type_asset)
replace_node_type(project, 'storage', node_type_storage)
replace_node_type(project, 'comment', node_type_comment)
replace_node_type(project, 'blog', node_type_blog)
replace_node_type(project, 'post', node_type_post)
replace_node_type(project, 'texture', node_type_texture)
put_item('projects', project)
def test_put_item(node_id):
"""Debug helper: fetch a node by id and round-trip it through put_item."""
import pprint
# NOTE(review): truncated; presumably app.data.driver.db['nodes'].
nodes_collection =['nodes']
node = nodes_collection.find_one(ObjectId(node_id))
put_item('nodes', node)
def test_post_internal(node_id):
"""Debug helper: fetch a node, strip Eve internals and POST it as new."""
import pprint
# NOTE(review): truncated; presumably app.data.driver.db['nodes'].
nodes_collection =['nodes']
node = nodes_collection.find_one(ObjectId(node_id))
# Remove internal fields that would fail validation on POST.
internal_fields = ['_id', '_etag', '_updated', '_created']
for field in internal_fields:
node.pop(field, None)
print post_internal('nodes', node)
if __name__ == '__main__':

File Metadata

Mime Type
Tue, Jan 31, 11:59 AM (2 d)
Storage Engine
Storage Format
Raw Data
Storage Handle

Event Timeline