Datasets:
code stringlengths 1 1.49M | vector listlengths 0 7.38k | snippet listlengths 0 7.38k |
|---|---|---|
# One-shot seeding script for development: flip the first condition to True
# to wipe products/categories, then the second block repopulates the catalog
# only when the product table is empty (idempotent on a seeded database).
if False: # set to True to insert test data
    store(store.product.id > 0).delete()
    store(store.category.id > 0).delete()
if len(store(store.product.id > 0).select()) == 0:
    fantasy_id = store.category.insert(name='Fantasy', description='Fantasy books', small_image='testdata/hp1.jpg')
    hp1 = store.product.insert(name="Harry Potter and the Sorcerer's Stone", category=fantasy_id, price=7.91, small_image='testdata/hp1.jpg')
    hp2 = store.product.insert(name="Harry Potter and the Chamber of Secrets", category=fantasy_id, price=8.91, small_image='testdata/hp2.jpg')
    hp3 = store.product.insert(name="Harry Potter and the Prisoner of Azkaban", category=fantasy_id, price=8.91, small_image='testdata/hp3.jpg')
    hp4 = store.product.insert(name="Harry Potter and the Goblet of Fire", category=fantasy_id, price=9.91, small_image='testdata/hp4.jpg')
    hp5 = store.product.insert(name="Harry Potter and the Order of the Phoenix", category=fantasy_id, price=9.91, small_image='testdata/hp5.jpg')
    hp6 = store.product.insert(name="Harry Potter and the Half-Blood Prince", category=fantasy_id, price=9.91, small_image='testdata/hp6.jpg')
    # Add-on options and merchandising links for the first book.
    store.option.insert(product=hp1, description='Bookmark', price=1.5)
    store.option.insert(product=hp1, description='Wizard hat', price=12)
    for p2 in (hp2, hp3, hp4, hp5, hp6):
        store.cross_sell.insert(p1=hp1, p2=p2)
    hp1_hard = store.product.insert(name="Harry Potter and the Sorcerer's Stone [hardcover]", category=fantasy_id, price=15.91, small_image='testdata/hp1.jpg')
    store.up_sell.insert(product=hp1, better=hp1_hard)
| [
[
4,
0,
0.525,
1,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
19
],
[
8,
1,
0.1,
0.05,
1,
0.89,
0,
266,
3,
0,
0,
0,
0,
0,
2
],
[
8,
1,
0.15,
0.05,
1,
0.89,
0.5,
266,
... | [
"if False: # set to True to insert test data \n store(store.product.id > 0).delete()\n store(store.category.id > 0).delete()\n if len(store(store.product.id > 0).select()) == 0:\n fantasy_id = store.category.insert(name='Fantasy', description='Fantasy books', small_image='testdata/hp1.jpg')\n ... |
# Sentinel meaning "value not provided"; fields holding UNDEFINED are hidden.
UNDEFINED = -1
if request.env.web2py_runtime_gae: # if running on Google App Engine
    store = DAL('gae') # connect to Google BigTable
    session.connect(request, response, db=store) # and store sessions and tickets there
else:
    store = DAL("sqlite://store.db")
# Product categories.
store.define_table('category',
    Field('name'),
    Field('description', 'text'),
    Field('small_image', 'upload'),
    )
# Items for sale; rating/viewed/clicked counters are updated by controllers.
store.define_table('product',
    Field('name'),
    Field('category', store.category),
    Field('description', 'text', default=''),
    Field('small_image', 'upload'),
    Field('large_image', 'upload', default=''),
    Field('quantity_in_stock', 'integer', default=UNDEFINED), # if UNDEFINED, don't show
    Field('max_quantity', 'integer', default=0), # maximum quantity that can be purchased in an order. If 0, no limit. If UNDEFINED, don't show.
    Field('price', 'double', default=1.0),
    Field('old_price', 'double', default=0.0),
    Field('weight_in_pounds', 'double', default=1),
    Field('tax_rate_in_your_state', 'double', default=10.0),
    Field('tax_rate_outside_your_state', 'double', default=0.0),
    Field('featured', 'boolean', default=False),
    Field('allow_rating', 'boolean', default=False),
    Field('rating', 'integer', default='0'),
    Field('viewed', 'integer', default='0'),
    Field('clicked', 'integer', default='0'))
# each product can have optional addons
store.define_table('option',
    Field('product', store.product),
    Field('description'),
    Field('price', 'double', default=1.0),
    )
# support for merchandising
# for p1 show p2, and for p2 show p1
store.define_table('cross_sell',
    Field('p1', store.product),
    Field('p2', store.product),
    )
# for product, show better, but not the reverse
store.define_table('up_sell',
    Field('product', store.product),
    Field('better', store.product),
    )
# Customer reviews; 'rate' feeds the product's running average rating.
store.define_table('comment',
    Field('product', store.product),
    Field('author'),
    Field('email'),
    Field('body', 'text'),
    Field('rate', 'integer')
    )
# Singleton store configuration record (branding, contact data, shipping
# rates). Shipping fields use the suffixes _fc/_vc/_bc — presumably
# fixed/variable/base cost components per carrier service; confirm in views.
store.define_table('info',
    Field('google_merchant_id', default='[google checkout id]', length=256),
    Field('name', default='[store name]'),
    Field('headline', default='[store headline]'),
    Field('address', default='[store address]'),
    Field('city', default='[store city]'),
    Field('state', default='[store state]'),
    Field('zip_code', default='[store zip]'),
    Field('phone', default='[store phone number]'),
    Field('fax', default='[store fax number]'),
    Field('email', requires=IS_EMAIL(), default='yourname@yourdomain.com'),
    Field('description', 'text', default='[about your store]'),
    Field('why_buy', 'text', default='[why buy at your store]'),
    Field('return_policy', 'text', default='[what is your return policy]'),
    Field('logo', 'upload', default=''),
    Field('color_background', length=10, default='white'),
    Field('color_foreground', length=10, default='black'),
    Field('color_header', length=10, default='#F6F6F6'),
    Field('color_link', length=10, default='#385ea2'),
    Field('font_family', length=32, default='arial, helvetica'),
    Field('ship_usps_express_mail', 'boolean', default=True),
    Field('ship_usps_express_mail_fc', 'double', default=0),
    Field('ship_usps_express_mail_vc', 'double', default=0),
    Field('ship_usps_express_mail_bc', 'double', default=0),
    Field('ship_usps_priority_mail', 'boolean', default=True),
    Field('ship_usps_priority_mail_fc', 'double', default=0),
    Field('ship_usps_priority_mail_vc', 'double', default=0),
    Field('ship_usps_priority_mail_bc', 'double', default=0),
    Field('ship_ups_next_day_air', 'boolean', default=True),
    Field('ship_ups_next_day_air_fc', 'double', default=0),
    Field('ship_ups_next_day_air_vc', 'double', default=0),
    Field('ship_ups_next_day_air_bc', 'double', default=0),
    Field('ship_ups_second_day_air', 'boolean', default=True),
    Field('ship_ups_second_day_air_fc', 'double', default=0),
    Field('ship_ups_second_day_air_vc', 'double', default=0),
    Field('ship_ups_second_day_air_bc', 'double', default=0),
    Field('ship_ups_ground', 'boolean', default=True),
    Field('ship_ups_ground_fc', 'double', default=0),
    Field('ship_ups_ground_vc', 'double', default=0),
    Field('ship_ups_ground_bc', 'double', default=0),
    Field('ship_fedex_priority_overnight', 'boolean', default=True),
    Field('ship_fedex_priority_overnight_fc', 'double', default=0),
    Field('ship_fedex_priority_overnight_vc', 'double', default=0),
    Field('ship_fedex_priority_overnight_bc', 'double', default=0),
    Field('ship_fedex_second_day', 'boolean', default=True),
    Field('ship_fedex_second_day_fc', 'double', default=0),
    Field('ship_fedex_second_day_vc', 'double', default=0),
    Field('ship_fedex_second_day_bc', 'double', default=0),
    Field('ship_fedex_ground', 'boolean', default=True),
    Field('ship_fedex_ground_fc', 'double', default=0),
    Field('ship_fedex_ground_vc', 'double', default=0),
    Field('ship_fedex_ground_bc', 'double', default=0)
    )
# Form validators for the store tables.
store.category.name.requires = IS_NOT_IN_DB(store, 'category.name')
# Product names must be both non-empty AND unique. The original assigned
# .requires twice, so the later IS_NOT_EMPTY() silently clobbered the
# IS_NOT_IN_DB() uniqueness check; a list applies both.
store.product.name.requires = [IS_NOT_EMPTY(), IS_NOT_IN_DB(store, 'product.name')]
store.product.category.requires = IS_IN_DB(store, 'category.id', 'category.name')
store.product.description.requires = IS_NOT_EMPTY()
store.product.quantity_in_stock.requires = IS_INT_IN_RANGE(0, 1000)
store.product.price.requires = IS_FLOAT_IN_RANGE(0, 10000)
store.product.rating.requires = IS_INT_IN_RANGE(-10000, 10000)
store.product.viewed.requires = IS_INT_IN_RANGE(0, 1000000)
store.product.clicked.requires = IS_INT_IN_RANGE(0, 1000000)
# Foreign-key dropdowns show the referenced record's name.
store.option.product.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.cross_sell.p1.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.cross_sell.p2.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.up_sell.product.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.up_sell.better.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.comment.product.requires = IS_IN_DB(store, 'product.id', 'product.name')
store.comment.author.requires = IS_NOT_EMPTY()
store.comment.email.requires = IS_EMAIL()
store.comment.body.requires = IS_NOT_EMPTY()
store.comment.rate.requires = IS_IN_SET(range(5, 0, -1))
# Shipping-cost fields end in the suffix 'fc' or 'vc'. The original tested
# field[:-2] (everything EXCEPT the suffix), which never matched, so the
# range validator was never attached; test the suffix instead.
for field in store.info.fields:
    if field[-2:] in ('fc', 'vc'):
        store.info[field].requires = IS_FLOAT_IN_RANGE(0, 100)
# Seed the singleton info record on first run.
if len(store(store.info.id > 0).select()) == 0:
    store.info.insert(name='[store name]')
mystore = store(store.info.id > 0).select()[0]
[
14,
0,
0.0065,
0.0065,
0,
0.66,
0,
67,
0,
0,
0,
0,
0,
0,
0
],
[
4,
0,
0.039,
0.0325,
0,
0.66,
0.0323,
0,
7,
0,
0,
0,
0,
0,
3
],
[
14,
1,
0.0325,
0.0065,
1,
0.3,
... | [
"UNDEFINED = -1",
"if request.env.web2py_runtime_gae: # if running on Google App Engine\n store = DAL('gae') # connect to Google BigTable\n session.connect(request, response, db=store) # and store sessions and tickets there\nelse:\n store = DAL(\"sqlite://store.db\")"... |
# URL slug helpers.
import re
# delimiter to use between words in URL
URL_DELIMITER = '-'
def pretty_url(id, name):
    """Build a slug like 'Some-Product-7' from a record *name* and *id*.

    Non-word characters are stripped, whitespace runs collapse to a single
    delimiter, and the numeric id is appended last so pretty_id() can
    recover it with rpartition.
    """
    # Raw string so '\w' is an explicit regex escape (the original non-raw
    # pattern triggers an invalid-escape warning on modern Pythons).
    cleaned = re.sub(r'[^\w ]+', '', name)
    slug = URL_DELIMITER.join(cleaned.split())
    return '%s%s%d' % (slug, URL_DELIMITER, id)
def pretty_id(url):
    """Recover the numeric record id that pretty_url() appended to *url*."""
    # Everything after the last delimiter is the id.
    trailing = url.rpartition(URL_DELIMITER)[2]
    return int(trailing)
def pretty_text(s):
    """Humanize an internal name: 'default' becomes 'home', underscores
    become spaces, and the first character is upper-cased."""
    humanized = s.replace('default', 'home')
    humanized = humanized.replace('_', ' ')
    return humanized.capitalize()
def title():
    """Page title: the explicit response.title when set, otherwise a
    humanized controller name (for index pages) or function name."""
    if response.title:
        return response.title
    source = request.controller if request.function == 'index' else request.function
    return pretty_text(source)
| [
[
1,
0,
0.1071,
0.0357,
0,
0.66,
0,
540,
0,
1,
0,
0,
540,
0,
0
],
[
14,
0,
0.25,
0.0357,
0,
0.66,
0.2,
352,
1,
0,
0,
0,
0,
3,
0
],
[
2,
0,
0.3393,
0.1429,
0,
0.66,
... | [
"import re",
"URL_DELIMITER = '-'",
"def pretty_url(id, name):\n \"\"\"Create pretty URL from record name and ID\n \"\"\"\n return '%s%s%d' % (' '.join(re.sub('[^\\w ]+', '', name).split()).replace(' ', URL_DELIMITER), URL_DELIMITER, id)",
" \"\"\"Create pretty URL from record name and ID\n \"\... |
###########################################################
### make sure administrator is on localhost
############################################################
import os, socket, datetime,copy
import gluon.contenttype
import gluon.fileutils
### critical --- make a copy of the environment
# Snapshot of the model globals so admin queries can be eval'd against them.
global_env=copy.copy(globals())
global_env['datetime']=datetime
http_host = request.env.http_host.split(':')[0]
remote_addr = request.env.remote_addr
try: hosts=(http_host, socket.gethostbyname(remote_addr))
except: hosts=(http_host,)
if remote_addr not in hosts:
    pass #raise HTTP(400)
# Only users holding valid web2py admin credentials may proceed.
if not gluon.fileutils.check_credentials(request):
    redirect('/admin')
response.view='appadmin.html'
response.menu=[[T('design'),False,URL('admin','default','design',
                 args=[request.application])],
               [T('db'),False,URL(r=request,f='index')],
               [T('state'),False,URL(r=request,f='state')]]
###########################################################
### auxiliary functions
############################################################
def get_databases(request):
    """Scan the captured global environment for DAL instances and return
    them keyed by variable name (the *request* argument is unused)."""
    found = {}
    for name, value in global_env.items():
        # On GAE the class is GQLDB; elsewhere GQLDB is undefined, so the
        # lookup raises and we fall back to the SQLDB check.
        try:
            is_db = isinstance(value, GQLDB)
        except Exception:
            is_db = isinstance(value, SQLDB)
        if is_db:
            found[name] = value
    return found
# Map of database variable names discovered in the model globals.
databases=get_databases(None)
def eval_in_global_env(text):
    """Evaluate *text* as a Python expression against the model globals.

    NOTE(review): this executes text that ultimately comes from request
    input — acceptable only because appadmin is gated behind the web2py
    admin credentials check above.
    """
    exec('_ret=%s'%text,{},global_env)
    return global_env['_ret']
def get_database(request):
    """Resolve request.args[0] to a known database object; on any invalid
    request flash a message and bounce back to the index page."""
    if not (request.args and request.args[0] in databases):
        session.flash = T('invalid request')
        redirect(URL(r=request, f='index'))
    return eval_in_global_env(request.args[0])
def get_table(request):
    """Return (db, tablename) taken from request.args; bounce to the index
    page when the table name is missing or unknown."""
    db = get_database(request)
    if len(request.args) > 1 and request.args[1] in db.tables:
        return db, request.args[1]
    session.flash = T('invalid request')
    redirect(URL(r=request, f='index'))
def get_query(request):
    """Evaluate request.vars.query in the model globals; None on failure."""
    try:
        query = eval_in_global_env(request.vars.query)
    except Exception:
        query = None
    return query
###########################################################
### list all databases and tables
############################################################
def index():
    """List every known database and its tables."""
    return {'databases': databases}
###########################################################
### insert a new record
############################################################
def insert():
    """Render an insert form for the requested table and process submission."""
    db, tablename = get_table(request)
    form = SQLFORM(db[tablename])
    if form.accepts(request.vars, session):
        response.flash = T('new record inserted')
    return {'form': form}
###########################################################
### list all records in table and insert new record
############################################################
def download():
    """Stream an uploaded file back to the client.

    On GAE the upload lives in a table field, so it is looked up there
    first; otherwise the file is streamed from the uploads/ folder.
    (Fixed: removed the leftover 'print filename' debug statement, which
    also broke the module under Python 3.)
    """
    import os
    db = get_database(request)
    filename = request.args[1]
    ### for GAE only ###
    table, field = filename.split('.')[:2]
    if table in db.tables and field in db[table].fields:
        uploadfield = db[table][field].uploadfield
        if isinstance(uploadfield, str):
            from gluon.contenttype import contenttype
            response.headers['Content-Type'] = contenttype(filename)
            rows = db(db[table][field] == filename).select()
            return rows[0][uploadfield]
    ### end for GAE ###
    path = os.path.join(request.folder, 'uploads/', filename)
    return response.stream(open(path, 'rb'))
def csv():
    """Export the rows matching request.vars.query as a CSV attachment."""
    import gluon.contenttype
    response.headers['Content-Type'] = gluon.contenttype.contenttype('.csv')
    query = get_query(request)
    if not query: return None
    # Query strings look like 'dbname.table.field>0'.
    dbname, tablename = request.vars.query.split('.')[:2]
    response.headers['Content-disposition'] = "attachment; filename=%s_%s.csv" % (dbname, tablename)
    # BUG FIX: the original referenced an undefined name `db` (NameError);
    # resolve the DAL object from the query's first component instead.
    db = eval_in_global_env(dbname)
    return str(db(query).select())
def import_csv(table, file):
    """Bulk-insert csv rows into *table*; header cells are 'table.column'
    names and any 'id' column is skipped."""
    import csv
    colnames = None
    keep = []
    for row in csv.reader(file):
        if not colnames:
            # Header row: strip the 'table.' prefix, remember non-id columns.
            colnames = [cell[cell.find('.') + 1:] for cell in row]
            keep = [i for i in range(len(row)) if colnames[i] != 'id']
        else:
            table.insert(**{colnames[i]: row[i] for i in keep})
def select():
    """Browse a table: run a query, optionally bulk update/delete matching
    rows or import a csv file, then show a 100-row page of results.

    State kept in the session: last_query (re-used when no query is given)
    and last_orderby (toggles ascending/descending on repeated clicks).
    """
    import re
    db=get_database(request)
    dbname=request.args[0]
    # Shorthand 'table.field=value' queries are rewritten into full
    # 'dbname.table.field==value' DAL expressions.
    regex=re.compile('(?P<table>\w+)\.(?P<field>\w+)=(?P<value>\d+)')
    if request.vars.query:
        match=regex.match(request.vars.query)
        if match: request.vars.query='%s.%s.%s==%s' % (request.args[0],match.group('table'),match.group('field'),match.group('value'))
    else:
        request.vars.query=session.last_query
    query=get_query(request)
    if request.vars.start: start=int(request.vars.start)
    else: start=0
    nrows=0
    stop=start+100
    table=None
    rows=[]
    orderby=request.vars.orderby
    if orderby:
        orderby=dbname+'.'+orderby
        # Clicking the same column again flips the sort direction.
        if orderby==session.last_orderby:
            if orderby[0]=='~': orderby=orderby[1:]
            else: orderby='~'+orderby
    session.last_orderby=orderby
    session.last_query=request.vars.query
    form=FORM(TABLE(TR('Query:','',INPUT(_style='width:400px',_name='query',_value=request.vars.query or '',requires=IS_NOT_EMPTY())),
                    TR('Update:',INPUT(_name='update_check',_type='checkbox',value=False),
                       INPUT(_style='width:400px',_name='update_fields',_value=request.vars.update_fields or '')),
                    TR('Delete:',INPUT(_name='delete_check',_class='delete',_type='checkbox',value=False),''),
                    TR('','',INPUT(_type='submit',_value='submit'))))
    if request.vars.csvfile!=None:
        try:
            import_csv(db[request.vars.table],request.vars.csvfile.file)
            response.flash=T('data uploaded')
        except:
            response.flash=T('unable to parse csv file')
    if form.accepts(request.vars,formname=None):
        # Detect the plain 'dbname.table.id>0' form to enable insert links.
        regex=re.compile(request.args[0]+'\.(?P<table>\w+)\.id\>0')
        match=regex.match(form.vars.query.strip())
        if match: table=match.group('table')
        try:
            nrows=db(query).count()
            if form.vars.update_check and form.vars.update_fields:
                db(query).update(**eval_in_global_env('dict(%s)'%form.vars.update_fields))
                response.flash=T('%s rows updated',nrows)
            elif form.vars.delete_check:
                db(query).delete()
                response.flash=T('%s rows deleted',nrows)
            # Recount after any mutation so the displayed total is current.
            nrows=db(query).count()
            if orderby: rows=db(query).select(limitby=(start,stop), orderby=eval_in_global_env(orderby))
            else: rows=db(query).select(limitby=(start,stop))
        except:
            rows,nrows=[],0
            response.flash=T('Invalid Query')
    return dict(form=form,table=table,start=start,stop=stop,nrows=nrows,rows=rows,query=request.vars.query)
###########################################################
### edit delete one record
############################################################
def update():
    """Edit (or delete, via the form) a single record; args are
    [dbname, tablename, record id]."""
    db,table=get_table(request)
    try:
        id=int(request.args[2])
        record=db(db[table].id==id).select()[0]
    except:
        # Missing/invalid id: go back to the table listing.
        session.flash=T('record does not exist')
        redirect(URL(r=request,f='select',args=request.args[:1],vars=dict(query='%s.%s.id>0'%tuple(request.args[:2]))))
    form=SQLFORM(db[table],record,deletable=True,
                 linkto=URL(r=request,f='select',args=request.args[:1]),
                 upload=URL(r=request,f='download',args=request.args[:1]))
    if form.accepts(request.vars,session):
        response.flash=T('done!')
        redirect(URL(r=request,f='select',args=request.args[:1],vars=dict(query='%s.%s.id>0'%tuple(request.args[:2]))))
    return dict(form=form)
###########################################################
### get global variables
############################################################
def state(): return dict() | [
[
1,
0,
0.0288,
0.0048,
0,
0.66,
0,
688,
0,
4,
0,
0,
688,
0,
0
],
[
1,
0,
0.0337,
0.0048,
0,
0.66,
0.04,
919,
0,
1,
0,
0,
919,
0,
0
],
[
1,
0,
0.0385,
0.0048,
0,
0.... | [
"import os, socket, datetime,copy",
"import gluon.contenttype",
"import gluon.fileutils",
"global_env=copy.copy(globals())",
"global_env['datetime']=datetime",
"http_host = request.env.http_host.split(':')[0]",
"remote_addr = request.env.remote_addr",
"try: hosts=(http_host, socket.gethostbyname(remot... |
# Lazily initialize the shopping cart the first time a visitor arrives.
# The cart is a list of (product_id, quantity, option_ids) tuples and
# balance is the running dollar total.
if not session.cart:
    # instantiate new cart
    session.cart, session.balance = [], 0
session.google_merchant_id = mystore.google_merchant_id
# Main navigation; the boolean marks the currently active entry.
response.menu = [
    ['Store Front', request.function == 'index', URL(r=request, f='index')],
    ['About Us', request.function == 'aboutus', URL(r=request, f='aboutus')],
    ['Contact Us', request.function == 'contactus', URL(r=request, f='contactus')],
    ['Shopping Cart $%.2f' % float(session.balance), request.function == 'checkout', URL(r=request, f='checkout')]
]
def index():
    """Store front: every category (alphabetical) plus featured products."""
    all_categories = store().select(store.category.ALL, orderby=store.category.name)
    featured_products = store(store.product.featured == True).select()
    return dict(categories=all_categories, featured=featured_products)
def category():
    """Category landing page: featured products (first page only) followed
    by a paginated list of the rest; args are [slug, start, stop]."""
    if not request.args: redirect(URL(r=request, f='index'))
    category_id = pretty_id(request.args[0])
    if len(request.args) == 3:
        # pagination
        start, stop = int(request.args[1]), int(request.args[2])
    else:
        start, stop = 0, 20
    categories = store().select(store.category.ALL, orderby=store.category.name)
    category_name = None
    for category in categories:
        if category.id == category_id:
            response.title = category_name = category.name
    # Unknown category id: bounce home.
    if not category_name: redirect(URL(r=request, f='index'))
    if start == 0:
        featured = store(store.product.featured == True)(store.product.category == category_id).select()
    else:
        featured = []
    # Avoid listing a product twice when it is already shown as featured.
    ids = [p.id for p in featured]
    favourites = store(store.product.category == category_id).select(limitby=(start, stop))
    favourites = [f for f in favourites if f.id not in ids]
    return dict(category_name=category_name, categories=categories, featured=featured, favourites=favourites)
def product():
    """Product detail page: add-to-cart form with optional add-ons, comment
    posting (which updates the running average rating), and cross/up-sell
    suggestions."""
    if not request.args: redirect(URL(r=request, f='index'))
    product_id = pretty_id(request.args[0])
    products = store(store.product.id == product_id).select()
    if not products: redirect(URL(r=request, f='index'))
    product = products[0]
    response.title = product.name
    # Count the page view.
    product.update_record(viewed=product.viewed+1)
    options = store(store.option.product == product.id).select()
    product_form = FORM(
        TABLE(
            [TR(TD(INPUT(_name='option', _value=option.id, _type='checkbox', _onchange="update_price(this, %.2f)" % option.price), option.description), H3('$%.2f' % option.price)) for option in options],
            TR(
                'Price:',
                H2('$%.2f' % float(product.price), _id='total_price')
            ),
            BR(),
            TH('Qty:', INPUT(_name='quantity', _class='integer', _value=1, _size=1)), INPUT(_type='submit', _value='Add to cart'),
        )
    )
    if product_form.accepts(request.vars, session):
        quantity = int(product_form.vars.quantity)
        # Checkbox vars arrive as a scalar for one selection, a list for many.
        option_ids = product_form.vars.option
        if not isinstance(option_ids, list):
            option_ids = [option_ids] if option_ids else []
        option_ids = [int(o) for o in option_ids]
        product.update_record(clicked=product.clicked+1)
        session.cart.append((product_id, quantity, option_ids))
        redirect(URL(r=request, f='checkout'))
    # post a comment about a product
    comment_form = SQLFORM(store.comment, fields=['author', 'email', 'body', 'rate'])
    comment_form.vars.product = product.id
    if comment_form.accepts(request.vars, session):
        # Fold the new rate into the stored average. NOTE(review): with an
        # integer rating field and Python 2 division this truncates — confirm
        # whether rounding is intended.
        nc = store(store.comment.product == product.id).count()
        t = products[0].rating*nc + int(comment_form.vars.rate)
        products[0].update_record(rating=t/(nc+1))
        response.flash = 'comment posted'
    if comment_form.errors: response.flash = 'invalid comment'
    comments = store(store.comment.product == product.id).select()
    # Merchandising: up-sells (one direction) and cross-sells (both directions).
    better_ids = [row.better for row in store(store.up_sell.product == product.id).select(store.up_sell.better)]
    related_ids = [row.p2 for row in store(store.cross_sell.p1 == product.id).select()] + [row.p1 for row in store(store.cross_sell.p2 == product.id).select()]
    suggested = [store.product[id] for id in better_ids + related_ids] # XXXstore(store.product.id.belongs(better_ids + related_ids)).select()
    return dict(product=product, comments=comments, options=options, suggested=suggested, product_form=product_form, comment_form=comment_form)
"""
{{ if product.old_price: }}
<b>was ${{= '%.2f' % float(product.old_price) }}</b>
{{ pass }}
</form>
"""
def remove_from_cart():
    """Drop the cart entry at the position given in the URL, then return
    to the checkout page."""
    position = int(request.args[0])
    del session.cart[position]
    redirect(URL(r=request, f='checkout'))
def empty_cart():
    """Remove every item from the cart and reset the running balance."""
    # BUG FIX: list.clear() does not exist in Python 2 (this app uses py2
    # print statements elsewhere), so the original raised AttributeError.
    # Slice deletion empties the list in place on any Python version.
    del session.cart[:]
    session.balance = 0
    redirect(URL(r=request, f='checkout'))
def checkout():
    """Build the order summary from the session cart and recompute the
    running balance; each cart entry is (product_id, qty, option_ids)."""
    order = []
    balance = 0
    for product_id, qty, option_ids in session.cart:
        products = store(store.product.id == product_id).select()
        if products:
            product = products[0]
            options = [store.option[id] for id in option_ids]# XXX store(store.option.id.belongs(option_ids)).select() if option_ids else []
            # Line total: quantity times base price plus all selected add-ons.
            total_price = qty * (product.price + sum([option.price for option in options]))
            order.append((product_id, qty, total_price, product, options))
            balance += total_price
        else:
            # invalid product
            pass
    session.balance = balance # XXX is updating in time?
    return dict(order=order, merchant_id=session.google_merchant_id)
def popup():
    """Popup helper page; all content comes from the template."""
    return {}
def show():
    """Serve a raw uploaded file (e.g. a product image) named by the
    request args, with the correct Content-Type header."""
    response.session_id = None
    import gluon.contenttype, os
    filename = '/'.join(request.args)
    response.headers['Content-Type'] = gluon.contenttype.contenttype(filename)
    # XXX is this path going to be a problem on Windows?
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(os.path.join(request.folder, 'uploads', filename), 'rb') as f:
        return f.read()
def aboutus(): return dict()
def contactus(): return dict()
| [
[
4,
0,
0.0203,
0.0203,
0,
0.66,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
[
14,
1,
0.027,
0.0068,
1,
0.72,
0,
0,
0,
0,
0,
0,
0,
8,
0
],
[
14,
0,
0.0338,
0.0068,
0,
0.66,
0.... | [
"if not session.cart:\n # instantiate new cart\n session.cart, session.balance = [], 0",
" session.cart, session.balance = [], 0",
"session.google_merchant_id = mystore.google_merchant_id",
"response.menu = [\n ['Store Front', request.function == 'index', URL(r=request, f='index')],\n ['About Us', ... |
###########################################################
### make sure administrator is on localhost
############################################################
import os
from gluon.contenttype import contenttype
from gluon.fileutils import check_credentials, listdir
# Every manage/ action except login itself requires an authorized session.
if not session.authorized and not request.function=='login':
    redirect(URL(r=request,f='login'))
response.view='manage.html'
response.menu=[['manage',True,'/%s/manage/index' % (request.application)],
               ['logout',False,'/%s/manage/logout' % (request.application)],
               ['back to store',False,'/%s/default/index' % (request.application)]]
###########################################################
### list all tables in database
############################################################
def login():
    """Admin login: verify the web2py admin password and, on success,
    mark the session as authorized and go to the manage index."""
    response.view = 'manage/login.html'
    from gluon.fileutils import check_credentials
    if check_credentials(request, 'admin'):
        session.authorized = True
        redirect(URL(r=request, f='index'))
    return {}
def logout():
    """Clear the admin flag and return to the store front."""
    session.authorized = False
    redirect(URL(r=request, c='default', f='index'))
def index():
    """List every SQLDB instance found in the globals with its tables."""
    import types as _types
    _dbs={}
    for _key,_value in globals().items():
        try:
            if _value.__class__==SQLDB:
                tables=_dbs[_key]=[]
                # NOTE(review): .tables is a list attribute in many web2py
                # versions; if calling it raises here, the bare except below
                # swallows the error and the db's table list stays empty.
                # Confirm against the gluon version in use.
                for _tablename in _value.tables():
                    tables.append((_key,_tablename))
        except: pass
    return dict(dbs=_dbs)
###########################################################
### insert a new record
############################################################
def insert():
    """Show an insert form for the db/table named in request.args and
    process its submission; invalid args bounce to the index page."""
    try:
        dbname=request.args[0]
        # NOTE(review): eval of a URL argument — acceptable only because this
        # controller is gated behind the admin login above.
        db=eval(dbname)
        table=request.args[1]
        form=SQLFORM(db[table])
    except: redirect(URL(r=request,f='index'))
    if form.accepts(request.vars,session):
        response.flash='new record inserted'
        redirect(URL(r=request,f='select',args=request.args))
    elif len(request.vars):
        # Fixed grammar of the user-facing message ('are error' -> 'are errors').
        response.flash='There are errors in your submission form'
    return dict(form=form)
###########################################################
### list all records in table and insert new record
############################################################
def download():
    """Stream an uploaded file from the application's uploads/ folder with
    the correct Content-Type header."""
    filename=request.args[0]
    response.headers['Content-Type']=contenttype(filename)
    # Close the file handle deterministically instead of leaking it.
    with open(os.path.join(request.folder,'uploads/','%s' % filename),'rb') as f:
        return f.read()
def csv():
    """Export the rows matching request.vars.query from request.vars.dbname
    as CSV text (Python 2 only: uses cStringIO)."""
    import gluon.contenttype, csv, cStringIO
    response.headers['Content-Type']=gluon.contenttype.contenttype('.csv')
    try:
        dbname=request.vars.dbname
        # NOTE(review): eval of request input — gated behind the admin login.
        db=eval(dbname)
        records=db(request.vars.query).select()
    except: redirect(URL(r=request,f='index'))
    s=cStringIO.StringIO()
    writer = csv.writer(s)
    writer.writerow(records.colnames)
    c=range(len(records.colnames))
    for i in range(len(records)):
        # NOTE(review): relies on the Rows object exposing a raw .response
        # matrix — confirm against the gluon version in use.
        writer.writerow([records.response[i][j] for j in c])
    ### FILL HERE
    return s.getvalue()
def import_csv(table, file):
    """Insert rows from csv *file*; the first row holds 'table.column'
    headers and any 'id' column is ignored."""
    import csv
    colnames = None
    for record in csv.reader(file):
        if not colnames:
            # Header row: drop the table prefix, note non-id column positions.
            colnames = [name[name.find('.') + 1:] for name in record]
            c = [pos for pos in range(len(record)) if colnames[pos] != 'id']
        else:
            values = dict((colnames[pos], record[pos]) for pos in c)
            table.insert(**values)
def select():
    """Browse/update/delete records of a table, with optional csv upload.

    args: [dbname, tablename]; request.vars.query may override the default
    'table.id>0' filter. Renders up to 100 rows from request.vars.start.
    (Fixed: 'reponse.flash' typo in the csv error handler raised NameError.)
    """
    try:
        dbname=request.args[0]
        # NOTE(review): eval of a URL argument — gated behind the admin login.
        db=eval(dbname)
        if not request.vars.query:
            table=request.args[1]
            query='%s.id>0' % table
        else: query=request.vars.query
    except: redirect(URL(r=request,f='index'))
    if request.vars.csvfile!=None:
        # NOTE(review): `table` is unbound here when an explicit query was
        # supplied; the resulting NameError is swallowed as a parse failure.
        try:
            import_csv(db[table],request.vars.csvfile.file)
            response.flash='data uploaded'
        except: response.flash='unable to parse csv file'
    if request.vars.delete_all and request.vars.delete_all_sure=='yes':
        try:
            db(query).delete()
            response.flash='records deleted'
        except: response.flash='invalid SQL FILTER'
    elif request.vars.update_string:
        try:
            env=dict(db=db,query=query)
            # Python 2 exec-statement form: run the update in a scratch env.
            exec('db(query).update('+request.vars.update_string+')') in env
            response.flash='records updated'
        except: response.flash='invalid SQL FILTER or UPDATE STRING'
    if request.vars.start: start=int(request.vars.start)
    else: start=0
    limitby=(start,start+100)
    try:
        records=db(query).select(limitby=limitby)
    except:
        response.flash='invalid SQL FILTER'
        return dict(records='no records',nrecords=0,query=query,start=0)
    linkto=URL(r=request,f='update/%s'% (dbname))
    upload=URL(r=request,f='download')
    return dict(start=start,query=query,\
                nrecords=len(records),\
                records=SQLTABLE(records,linkto,upload,_class='sortable'))
###########################################################
### edit delete one record
############################################################
def update():
    """Edit or delete one record; args are [dbname, tablename, record id]."""
    try:
        dbname=request.args[0]
        # NOTE(review): eval of a URL argument — gated behind the admin login.
        db=eval(dbname)
        table=request.args[1]
    except: redirect(URL(r=request,f='index'))
    try:
        id=int(request.args[2])
        record=db(db[table].id==id).select()[0]
    except: redirect(URL(r=request,f='select/%s/%s'%(dbname,table)))
    form=SQLFORM(db[table],record,deletable=True,
                 linkto=URL(r=request,f='select/'+dbname),
                 upload=URL(r=request,f='download/'))
    if form.accepts(request.vars,session):
        response.flash='done!'
        redirect(URL(r=request,f='select/%s/%s'%(dbname,table)))
    return dict(form=form)
def cleanup():
    """Delete every cache, error-ticket and session file for this app,
    then flash a confirmation and return to the manage index."""
    app = request.application
    for subdir in ('cache', 'errors', 'sessions'):
        for path in listdir('applications/%s/%s/' % (app, subdir), '', 0):
            os.unlink(path)
    session.flash = "cache, errors and sessions cleaned"
    redirect(URL(r=request, f='index'))
def setup():
    """One-page store configuration form backed by the singleton info record."""
    response.view = 'manage/setup.html'
    form = SQLFORM(store.info, mystore)
    if form.accepts(request.vars, session):
        response.flash = 'that was easy! now go vist your store.'
    else:
        response.flash = 'welcome to the store-in-a-stick setup'
    return dict(form=form)
[
1,
0,
0.0278,
0.0056,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0333,
0.0056,
0,
0.66,
0.0625,
919,
0,
1,
0,
0,
919,
0,
0
],
[
1,
0,
0.0389,
0.0056,
0,
... | [
"import os",
"from gluon.contenttype import contenttype",
"from gluon.fileutils import check_credentials, listdir",
"if not session.authorized and not request.function=='login':\n redirect(URL(r=request,f='login'))",
" redirect(URL(r=request,f='login'))",
"response.view='manage.html'",
"response.m... |
#!/usr/bin/python2.4
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''The setup and build script for the python-twitter library.'''
__author__ = 'python-twitter@googlegroups.com'
__version__ = '0.8.5'
# The base package metadata to be used by both distutils and setuptools
METADATA = dict(
    name = "python-twitter",
    version = __version__,
    py_modules = ['twitter'],
    author='The Python-Twitter Developers',
    author_email='python-twitter@googlegroups.com',
    description='A python wrapper around the Twitter API',
    license='Apache License 2.0',
    url='https://github.com/bear/python-twitter',
    keywords='twitter api',
)
# Extra package metadata to be used only if setuptools is installed
SETUPTOOLS_METADATA = dict(
    install_requires = ['setuptools', 'simplejson', 'oauth2'],
    include_package_data = True,
    classifiers = [
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Communications :: Chat',
        'Topic :: Internet',
    ],
    test_suite = 'twitter_test.suite',
)
def Read(file):
    """Return the full text contents of the file at path *file*."""
    # Close the handle deterministically instead of leaking it.
    with open(file) as f:
        return f.read()
def BuildLongDescription():
    """Concatenate README.md and CHANGES into one long-description string."""
    parts = [Read('README.md'), Read('CHANGES')]
    return '\n'.join(parts)
def Main():
    """Assemble the long description and run setup, preferring setuptools
    (with its extra metadata) and falling back to plain distutils."""
    METADATA['long_description'] = BuildLongDescription()
    try:
        import setuptools
        METADATA.update(SETUPTOOLS_METADATA)
        setuptools.setup(**METADATA)
    except ImportError:
        # setuptools unavailable: distutils handles the base metadata only.
        import distutils.core
        distutils.core.setup(**METADATA)
# Script entry point.
if __name__ == '__main__':
    Main()
| [
[
8,
0,
0.2329,
0.0137,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.2603,
0.0137,
0,
0.66,
0.125,
777,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.274,
0.0137,
0,
0.66,
... | [
"'''The setup and build script for the python-twitter library.'''",
"__author__ = 'python-twitter@googlegroups.com'",
"__version__ = '0.8.5'",
"METADATA = dict(\n name = \"python-twitter\",\n version = __version__,\n py_modules = ['twitter'],\n author='The Python-Twitter Developers',\n author_email='pyth... |
#!/usr/bin/python2.4
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl
except:
from cgi import parse_qsl
import oauth2 as oauth
# Twitter OAuth 1.0a endpoints used by the three-step authorization
# dance performed below.
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
# Application credentials: must be filled in by the developer before
# running this script (enforced by the guard below).
consumer_key = None
consumer_secret = None
# Bail out with instructions when the script has not yet been edited to
# supply the registered application's credentials.
if consumer_key is None or consumer_secret is None:
  print 'You need to edit this script and provide values for the'
  print 'consumer_key and also consumer_secret.'
  print ''
  print 'The values you need come from Twitter - you need to register'
  print 'as a developer your "application". This is needed only until'
  print 'Twitter finishes the idea they have of a way to allow open-source'
  print 'based libraries to have a token that can be used to generate a'
  print 'one-time use key that will allow the library to make the request'
  print 'on your behalf.'
  print ''
  sys.exit(1)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
oauth_client = oauth.Client(oauth_consumer)
print 'Requesting temp token from Twitter'
resp, content = oauth_client.request(REQUEST_TOKEN_URL, 'GET')
if resp['status'] != '200':
print 'Invalid respond from Twitter requesting temp token: %s' % resp['status']
else:
request_token = dict(parse_qsl(content))
print ''
print 'Please visit this Twitter page and retrieve the pincode to be used'
print 'in the next step to obtaining an Authentication Token:'
print ''
print '%s?oauth_token=%s' % (AUTHORIZATION_URL, request_token['oauth_token'])
print ''
pincode = raw_input('Pincode? ')
token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
token.set_verifier(pincode)
print ''
print 'Generating and signing request for an access token'
print ''
oauth_client = oauth.Client(oauth_consumer, token)
resp, content = oauth_client.request(ACCESS_TOKEN_URL, method='POST', body='oauth_callback=oob&oauth_verifier=%s' % pincode)
access_token = dict(parse_qsl(content))
if resp['status'] != '200':
print 'The request for a Token did not succeed: %s' % resp['status']
print access_token
else:
print 'Your Twitter Access Token key: %s' % access_token['oauth_token']
print ' Access Token secret: %s' % access_token['oauth_token_secret']
print ''
| [
[
1,
0,
0.1978,
0.011,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.2088,
0.011,
0,
0.66,
0.0625,
509,
0,
1,
0,
0,
509,
0,
0
],
[
7,
0,
0.2582,
0.044,
0,
0.6... | [
"import os",
"import sys",
"try:\n from urlparse import parse_qsl\nexcept:\n from cgi import parse_qsl",
" from urlparse import parse_qsl",
" from cgi import parse_qsl",
"import oauth2 as oauth",
"REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'",
"ACCESS_TOKEN_URL = 'https://api... |
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
# Characters that must be escaped inside a JSON string: control
# characters, backslash and double quote.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ASCII-only output: additionally escape everything outside ' '..'~'.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects bytes with the high bit set, i.e. possible UTF-8 multibyte data.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Short two-character escapes for the characters that have them.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# The remaining control characters fall back to generic \uXXXX escapes.
for i in range(0x20):
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
# Hook for float formatting; repr keeps full precision.
FLOAT_REPR = repr
def encode_basestring(s):
    """Wrap *s* in double quotes, escaping all special JSON characters."""
    escaped = ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s)
    return '"%s"' % escaped
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string
    """
    # Decode raw UTF-8 byte strings to unicode first so multibyte
    # characters are escaped as code points, not as individual bytes.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            # Not one of the short escapes: emit a generic \uXXXX escape.
            n = ord(s)
            if n < 0x10000:
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                # Code points above the BMP are encoded as a UTF-16
                # high/low surrogate pair.
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C-accelerated implementation when the _speedups extension
# imported successfully; otherwise use the pure-Python fallback above.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.
    Supports the following objects and types by default:
    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+
    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; overridden per instance when *separators* is
    # passed to the constructor.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.
        If skipkeys is False, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.
        If ensure_ascii is True, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.
        If check_circular is True, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.
        If allow_nan is True, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.
        If sort_keys is True, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.
        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level. An indent level of 0 will only insert newlines.
        None is the most compact representation.
        If specified, separators should be a (item_separator, key_separator)
        tuple. The default is (', ', ': '). To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.
        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.
        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        # Only shadow the class-level default() hook when a callable was
        # actually supplied.
        if default is not None:
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).
        For example, to support arbitrary iterators, you could
        implement default like this::
            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError("%r is not JSON serializable" % (o,))
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                # UTF-8 byte strings are handled natively by the string
                # encoders; anything else is decoded to unicode first.
                if (_encoding is not None
                    and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.
        For example::
            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        # markers maps id(container) -> container for circular-reference
        # detection; None disables the check entirely.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        # Wrap the chosen encoder so byte strings in a non-UTF-8 encoding
        # are decoded to unicode before being escaped.
        if self.encoding != 'utf-8':
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials. Note that this type of test is processor- and/or
            # platform-specific, so do tests which don't depend on the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError("Out of range float values are not JSON compliant: %r"
                    % (o,))
            return text
        # Use the C-accelerated one-shot encoder when it is available and
        # neither pretty-printing nor key sorting is requested.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
    ## HACK: hand-optimized bytecode; turn globals into locals
    False=False,
    True=True,
    ValueError=ValueError,
    basestring=basestring,
    dict=dict,
    float=float,
    id=id,
    int=int,
    isinstance=isinstance,
    list=list,
    long=long,
    str=str,
    tuple=tuple,
    ):
    """Build the pure-Python ``_iterencode(o, indent_level)`` generator
    used by JSONEncoder.iterencode when the C speedup cannot be used.
    ``markers`` is the (possibly None) circular-reference map shared by
    the three mutually recursive generators defined below.
    """
    def _iterencode_list(lst, _current_indent_level):
        # Yield the chunks of a JSON array representation of *lst*.
        if not lst:
            yield '[]'
            return
        # Register this container for circular-reference detection while
        # it is being encoded; it is removed again after the closing ']'.
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                buf = separator
            # Primitives are yielded in one chunk together with the
            # pending buffer; containers are delegated to the sibling
            # generators.
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        # Yield the chunks of a JSON object representation of *dct*.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            # Coerce non-string keys to their JSON string spelling, or
            # skip/raise depending on _skipkeys.
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif isinstance(key, (int, long)):
                key = str(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif _skipkeys:
                continue
            else:
                raise TypeError("key %r is not a string" % (key,))
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatcher: encode any supported object type.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            # Unknown type: let _default() convert it, guarding against a
            # default() implementation that returns (part of) its input.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| [
[
8,
0,
0.0035,
0.0046,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0069,
0.0023,
0,
0.66,
0.0667,
540,
0,
1,
0,
0,
540,
0,
0
],
[
7,
0,
0.015,
0.0092,
0,
0.66,... | [
"\"\"\"Implementation of JSONEncoder\n\"\"\"",
"import re",
"try:\n from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii\nexcept ImportError:\n c_encode_basestring_ascii = None",
" from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii",... |
"r\"\"\"JSON (JavaScript Object Notation) <http://json.org> is a subset of\nJavaScript syntax (ECMA-(...TRUNCATED) | [[8.0,0.0,0.1582,0.3133,0.0,0.66,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0],[14.0,0.0,0.3165,0.0032,0.0,0.(...TRUNCATED) | ["r\"\"\"JSON (JavaScript Object Notation) <http://json.org> is a subset of\nJavaScript syntax (ECMA(...TRUNCATED) |
End of preview. Expand
in Data Studio
Dataset Card for Dataset Name
Dataset Details
Dataset Description
- Curated by: [More Information Needed]
- Funded by [optional]: [More Information Needed]
- Shared by [optional]: [More Information Needed]
- Language(s) (NLP): en
- License: unknown
Dataset Sources [optional]
- Repository: [More Information Needed]
- Paper [optional]: [More Information Needed]
- Demo [optional]: [More Information Needed]
Uses
Direct Use
[More Information Needed]
Out-of-Scope Use
[More Information Needed]
Dataset Structure
[More Information Needed]
Dataset Creation
Curation Rationale
[More Information Needed]
Source Data
Data Collection and Processing
[More Information Needed]
Who are the source data producers?
[More Information Needed]
Annotations [optional]
Annotation process
[More Information Needed]
Who are the annotators?
[More Information Needed]
Personal and Sensitive Information
[More Information Needed]
Bias, Risks, and Limitations
[More Information Needed]
Recommendations
Users should be made aware of the risks, biases and limitations of the dataset. More information is needed for further recommendations.
Citation [optional]
BibTeX:
[More Information Needed]
APA:
[More Information Needed]
Glossary [optional]
[More Information Needed]
More Information [optional]
[More Information Needed]
Dataset Card Authors [optional]
[More Information Needed]
Dataset Card Contact
[More Information Needed]
- Downloads last month
- 10