diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 5f12078..6ca85a7 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -1,7 +1,7 @@ # This workflow will install Python dependencies, run tests and lint with a single version of Python # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python -name: Python application +name: Unit Tests on: push: @@ -34,7 +34,7 @@ jobs: python -m pip install --upgrade pip pip install -U . pip install -U ".[test]" - pip install flake8 pytest + pip install flake8 pytest nose if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Lint with flake8 run: | diff --git a/.gitignore b/.gitignore index f24cd99..a02076f 100644 --- a/.gitignore +++ b/.gitignore @@ -20,8 +20,11 @@ pip-log.txt .coverage .tox -#Translations +# Translations *.mo -#Mr Developer +# Mr Developer .mr.developer.cfg + +# PiCaS DB information +scripts/picasconfig.py diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 4c29eed..0000000 --- a/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: python -sudo: false -python: - - "2.6" - - "2.7" - - "3.4" - - "3.5" -install: - - pip install -U pip - - pip install -U . - - pip install -U ".[test]" - - pip install coverage -script: - - nosetests --with-coverage --cover-package=picas tests - - if [[ $TRAVIS_PYTHON_VERSION != 2.6* ]]; then flake8 picas; fi diff --git a/README.md b/README.md index c7f1bfa..2ea7654 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,20 @@ picasclient -=========== +----------- + +![CICD](https://github.com/sara-nl/picasclient/actions/workflows/python-app.yml/badge.svg) Python client using CouchDB as a token pool server. -## Installation +Installation +============ -Run +To install run ``` pip install -U . 
``` -## Testing +Testing +======= First, install the test dependencies with ``` @@ -22,10 +26,128 @@ flake8 picas tests nosetests tests ``` +Examples +======== + +## Setting up the examples + +The scripts directory contains examples to use the picasclient. There are examples for running locally (laptop, cluster login), slurm and the Grid (https://www.egi.eu/). +To run the examples, first you need to have a CouchDB instance running that functions as the token broker that keeps the tokens and then worker machines can approach the broker to get work. To set up this db, see the [SURF documentation](https://doc.grid.surfsara.nl/en/latest/Pages/Practices/picas/picas_overview.html#picas-server-1). + +Once this server is running, you can run the PiCaS examples: + - Local + - Slurm + - Grid + +To approach the DB, you have to fill in the `examples/picasconfig.py` with the information to log in to your CouchDB instance and the database you want to use for storing the work tokens. + +Next you have to send some tokens with work to the CouchDB instance. You can send two types of work in this example. For very fast running jobs, send the `quickExample.txt` file with: + +``` +python pushTokens.py quickExample.txt +``` + + +Now we are ready to run the examples! + +## Running locally + +To run the local example do: + +``` +python local-example.py +``` + +If all goes well you should see output like: + +``` +----------------------- +Working on token: token_0 +_id token_0 +_rev 4-8b04da64c0a536bb88a3cdebe12e0a87 +type token +lock 1692692693 +done 0 +hostname ui-01.spider.surfsara.nl +scrub_count 0 +input echo "bash-echo" +exit_code 0 +----------------------- +``` + +The token in the database will have attachments with the regular and error output of the terminal. There you will find the output file `logs_token_0.out`, containing the output of the input command: + +``` +echo "bash-echo" +>>> bash-echo +``` + +Once the script is running, it will start polling the CouchDB instance for work. 
Once the work is complete, the script will finish. + +## Running on Slurm + +To run on slurm, first open the `slurm-example.sh` file and make sure your python virtual env or conda/mamba environment is loaded. +Then you have to add tokens to CouchDB using the same setup procedure as mentioned above, with the pushTokens method. + +To start the slurm job that runs the PiCaS client do: + +``` +sbatch slurm-example.sh +``` + +Now a slurm job array is started (you can set the number of array jobs in the script at `--array`) and each job will start polling the CouchDB instance for work. Once the work is complete, the jobs will finish. + +## Running on Grid + +First we need to create a tar of the picas code, so that it can be sent to the Grid: + +``` +tar cfv grid-sandbox/picas.tar ../picas/ +``` + +Secondly, the CouchDB python API needs to be available too, so download and extract it: + +``` +wget https://files.pythonhosted.org/packages/7c/c8/f94a107eca0c178e5d74c705dad1a5205c0f580840bd1b155cd8a258cb7c/CouchDB-1.2.tar.gz +``` + +Now you can start the example from a grid login node with (in this case DIRAC is used for job submission): + +``` +dirac-wms-job-submit fractals.jdl +``` + +And the status and output can be retrieved with the usual DIRAC commands, while in the token you see the status of the token and the attachments with the log files. + +## Running the long jobs + +To send longer running code (it takes up to 30 minutes per token), do: + +``` +./createTokens +>>> /tmp/tmp.JoLqcdYZRD +``` + +And pass the output file to the push tokens code: + +``` +python pushTokens.py /tmp/tmp.JoLqcdYZRD +``` + +Now the tokens are available in the database. 
Now, the binary for the calculation needs to be built: + +``` +cc src/fractals.c -o bin/fractals -lm +``` + +And finally, the `*-example.py` code needs to call a different command: + +``` +command = "/usr/bin/time -v ./process_task.sh " + "\"" +token['input'] + "\" " + token['_id'] + " 2> logs_" + str(token['_id']) + ".err 1> logs_" + str(token['_id']) + ".out" +``` -## Travis build status +So adjust the `*-example.py` python code for whichever way you want to run it (locally, slurm, grid) and start running the way described above! -[![Build Status](https://travis-ci.org/sara-nl/picasclient.svg?branch=master)](https://travis-ci.org/sara-nl/picasclient) ## QuantifiedCode Automated code review diff --git a/examples/createTokens b/examples/createTokens new file mode 100755 index 0000000..947e86b --- /dev/null +++ b/examples/createTokens @@ -0,0 +1,67 @@ +#!/bin/bash + +PARAM_Q_MIN=100 +PARAM_Q_MAX=260 +PARAM_Q_STEP=84 + +PARAM_D_MIN=256 +PARAM_D_MAX=8192 +PARAM_D_STEP=2024 + +PARAM_M_MIN=400 +PARAM_M_MAX=10000 +PARAM_M_STEP=4000 + +function testNumerical { + if [ $1 -eq $1 2> /dev/null ]; then + return + fi + echo "$1 is not numerical!" 
+ exit 1 +} + +while getopts "q:d:m:" opt; do + testNumerical $OPTARG + case $opt in + q) + PARAM_Q_STEP=$OPTARG + ;; + d) + PARAM_D_STEP=$OPTARG + ;; + m) + PARAM_M_STEP=$OPTARG + ;; + esac +done + +PARAMFILE=$( mktemp ) + +PARAM_Q=$PARAM_Q_MIN +PARAM_D=$PARAM_D_MIN +PARAM_M=$PARAM_M_MIN + +STR_PARAM_Q="" + +while [ $PARAM_Q_MAX -ge $PARAM_Q ]; do + # Ugly hack in order not to have to use bc + if [ $[ $PARAM_Q / 10 ] -eq 0 ]; then + STR_PARAM_Q="0.00${PARAM_Q}" + elif [ $[ $PARAM_Q / 100 ] -eq 0 ]; then + STR_PARAM_Q="0.0${PARAM_Q}" + else + STR_PARAM_Q="0.${PARAM_Q}" + fi + while [ $PARAM_D_MAX -ge $PARAM_D ]; do + while [ $PARAM_M_MAX -ge $PARAM_M ]; do + echo "-q ${STR_PARAM_Q} -d ${PARAM_D} -m ${PARAM_M}" >> $PARAMFILE + PARAM_M=$[ $PARAM_M+$PARAM_M_STEP ] + done + PARAM_M=$PARAM_M_MIN + PARAM_D=$[ $PARAM_D+$PARAM_D_STEP ] + done + PARAM_D=$PARAM_D_MIN + PARAM_Q=$[ $PARAM_Q+$PARAM_Q_STEP ] +done + +echo ${PARAMFILE} diff --git a/scripts/example.py b/examples/example-template.py similarity index 100% rename from scripts/example.py rename to examples/example-template.py diff --git a/examples/fractals.jdl b/examples/fractals.jdl new file mode 100644 index 0000000..3c7935b --- /dev/null +++ b/examples/fractals.jdl @@ -0,0 +1,12 @@ +[ + ParameterStart=0; + ParameterStep=1; + Parameters=5; + + Executable = "/bin/sh"; + Arguments = "startpilot.sh"; + Stdoutput = "parametricjob.out"; + StdError = "parametricjob.err"; + InputSandbox = {"grid-sandbox/CouchDB-1.2.tar.gz", "grid-sandbox/picas.tar", "grid-sandbox/startpilot.sh", "grid-sandbox/grid-example.py", "grid-sandbox/process_task.sh", "bin/fractals", "picasconfig.py"}; + OutputSandbox = {"parametricjob.out", "parametricjob.err"}; +] diff --git a/examples/grid-sandbox/grid-example.py b/examples/grid-sandbox/grid-example.py new file mode 100755 index 0000000..a869632 --- /dev/null +++ b/examples/grid-sandbox/grid-example.py @@ -0,0 +1,80 @@ +''' +@helpdesk: SURF helpdesk + +usage: python grid-example.py +description: 
+ Connect to PiCaS server + Get the next token in todo View + Fetch the token parameters, e.g. input value + Run main job (process_task.sh) with the input argument + When done, return the exit code to the token + Attach the logs to the token + + +''' + +#python imports +import os +import time +import couchdb +import picasconfig + +#picas imports +from picas.actors import RunActor +from picas.clients import CouchDB +from picas.iterators import TaskViewIterator +from picas.modifiers import BasicTokenModifier +from picas.executers import execute + +class ExampleActor(RunActor): + def __init__(self, db, modifier, view="todo", **viewargs): + super(ExampleActor, self).__init__(db, view=view, **viewargs) + self.modifier = modifier + self.client = db + + def process_task(self, token): + # Print token information + print("-----------------------") + print("Working on token: " +token['_id']) + for key, value in token.doc.items(): + print(key, value) + print("-----------------------") + + # Start running the main job + # /usr/bin/time -v ./process_task.sh [input] [tokenid] 2> logs_[token_id].err 1> logs_[token_id].out + command = "/usr/bin/time -v ./process_task.sh " + "\"" +token['input'] + "\" " + token['_id'] + " 2> logs_" + str(token['_id']) + ".err 1> logs_" + str(token['_id']) + ".out" + + out = execute(command,shell=True) + + ## Get the job exit code in the token + token['exit_code'] = out[0] + token = self.modifier.close(token) + #self.client.db[token['_id']] = token # necessary? 
+ + # Attach logs in token + curdate = time.strftime("%d/%m/%Y_%H:%M:%S_") + try: + logsout = "logs_" + str(token['_id']) + ".out" + log_handle = open(logsout, 'rb') + token.put_attachment(logsout, log_handle.read()) + + logserr = "logs_" + str(token['_id']) + ".err" + log_handle = open(logserr, 'rb') + token.put_attachment(logserr, log_handle.read()) + except: + print("excepted attachemnt") + pass + +def main(): + # setup connection to db + client = CouchDB(url=picasconfig.PICAS_HOST_URL, db=picasconfig.PICAS_DATABASE, username=picasconfig.PICAS_USERNAME, password=picasconfig.PICAS_PASSWORD) + print("Connected to the database %s sucessfully. Now starting work..." %(picasconfig.PICAS_DATABASE)) + # Create token modifier + modifier = BasicTokenModifier() + # Create actor + actor = ExampleActor(client, modifier) + # Start work! + actor.run() + +if __name__ == '__main__': + main() diff --git a/examples/grid-sandbox/process_task.sh b/examples/grid-sandbox/process_task.sh new file mode 100755 index 0000000..aa907be --- /dev/null +++ b/examples/grid-sandbox/process_task.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +#@helpdesk: SURF helpdesk +# +# usage: ./process_task.sh [input] [tokenid] + + +#Enable verbosity +set -x + +#Obtain information for the Worker Node +echo "" +echo `date` +echo ${HOSTNAME} + +#Initialize job arguments +INPUT=$1 +TOKENID=$2 +OUTPUT=output_${TOKENID} +echo $INPUT +echo $TOKENID +echo $OUTPUT + +#Start processing +./fractals -o $OUTPUT $INPUT +if [[ "$?" != "0" ]]; then + echo "Program interrupted. Exit now..." + exit 1 +fi + +#Copy output to the grid storage +#globus-url-copy file:///${PWD}/${OUTPUT} gsiftp://gridftp.grid.sara.nl:2811/pnfs/grid.sara.nl/data/lsgrid/homer/${OUTPUT} + +echo `date` + +exit 0 diff --git a/examples/grid-sandbox/startpilot.sh b/examples/grid-sandbox/startpilot.sh new file mode 100755 index 0000000..14a8721 --- /dev/null +++ b/examples/grid-sandbox/startpilot.sh @@ -0,0 +1,23 @@ +# @helpdesk: SURF helpdesk +# +# usage: . 
startpilot.sh +# description: +# Configure PiCaS environment for the communication with couchDB +# Start the pilot job + + +set -x + +JOBDIR=${PWD} # the directory where the job lands + +tar -xvf ${JOBDIR}/picas.tar +tar -xvf ${JOBDIR}/CouchDB-1.2.tar.gz CouchDB-1.2/couchdb && mv CouchDB-1.2/couchdb couchdb && rmdir CouchDB-1.2 + +echo "Start the pilot job tasks by contacting PiCaS tokens" + +# set permissions for the process_task script and fractals executable +chmod u+x ${JOBDIR}/process_task.sh +chmod u+x ${JOBDIR}/fractals +ls -l ${JOBDIR} + +python ${JOBDIR}/grid-example.py diff --git a/examples/local-example.py b/examples/local-example.py new file mode 100755 index 0000000..03a238f --- /dev/null +++ b/examples/local-example.py @@ -0,0 +1,79 @@ +''' +@helpdesk: SURF helpdesk + +usage: python local-example.py +description: + Connect to PiCaS server + Get the next token in todo View + Fetch the token parameters, e.g. input value + Run main job (process_task.sh) with the input argument + When done, return the exit code to the token + Attach the logs to the token + + +''' + +#python imports +import os +import time +import couchdb +import picasconfig + +#picas imports +from picas.actors import RunActor +from picas.clients import CouchDB +from picas.iterators import TaskViewIterator +from picas.modifiers import BasicTokenModifier +from picas.executers import execute + +class ExampleActor(RunActor): + def __init__(self, db, modifier, view="todo", **viewargs): + super(ExampleActor, self).__init__(db, view=view, **viewargs) + self.modifier = modifier + self.client = db + + def process_task(self, token): + # Print token information + print("-----------------------") + print("Working on token: " +token['_id']) + for key, value in token.doc.items(): + print(key, value) + print("-----------------------") + + # Start running the main job + # /usr/bin/time -v ./process_task.sh [input] [tokenid] 2> logs_[token_id].err 1> logs_[token_id].out + command = "/usr/bin/time -v 
./process_task.sh " + "\"" +token['input'] + "\" " + token['_id'] + " 2> logs_" + str(token['_id']) + ".err 1> logs_" + str(token['_id']) + ".out" + + out = execute(command,shell=True) + + ## Get the job exit code in the token + token['exit_code'] = out[0] + token = self.modifier.close(token) + #self.client.db[token['_id']] = token # necessary? + + # Attach logs in token + curdate = time.strftime("%d/%m/%Y_%H:%M:%S_") + try: + logsout = "logs_" + str(token['_id']) + ".out" + log_handle = open(logsout, 'rb') + token.put_attachment(logsout, log_handle.read()) + + logserr = "logs_" + str(token['_id']) + ".err" + log_handle = open(logserr, 'rb') + token.put_attachment(logserr, log_handle.read()) + except: + pass + +def main(): + # setup connection to db + client = CouchDB(url=picasconfig.PICAS_HOST_URL, db=picasconfig.PICAS_DATABASE, username=picasconfig.PICAS_USERNAME, password=picasconfig.PICAS_PASSWORD) + print("Connected to the database %s sucessfully. Now starting work..." %(picasconfig.PICAS_DATABASE)) + # Create token modifier + modifier = BasicTokenModifier() + # Create actor + actor = ExampleActor(client, modifier) + # Start work! + actor.run() + +if __name__ == '__main__': + main() diff --git a/examples/picasconfig.py b/examples/picasconfig.py new file mode 100644 index 0000000..b92550a --- /dev/null +++ b/examples/picasconfig.py @@ -0,0 +1,4 @@ +PICAS_HOST_URL="" +PICAS_DATABASE="" +PICAS_USERNAME="" +PICAS_PASSWORD="" diff --git a/examples/process_task.sh b/examples/process_task.sh new file mode 100755 index 0000000..96e6218 --- /dev/null +++ b/examples/process_task.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +#@helpdesk: SURF helpdesk +# +# usage: ./process_task.sh [input] [tokenid] + + +#Enable verbosity +set -x + +#Obtain information for the Worker Node +echo "" +echo `date` +echo ${HOSTNAME} + +#Initialize job arguments +INPUT=$1 +TOKENID=$2 +OUTPUT=output_${TOKENID} +echo $INPUT +echo $TOKENID +echo $OUTPUT + +#Start processing +eval $INPUT +if [[ "$?" 
!= "0" ]]; then + echo "Program interrupted. Exit now..." + exit 1 +fi + +#Copy output to the grid storage +#globus-url-copy file:///${PWD}/${OUTPUT} gsiftp://gridftp.grid.sara.nl:2811/pnfs/grid.sara.nl/data/lsgrid/homer/${OUTPUT} + +echo `date` + +exit 0 diff --git a/examples/pushTokens.py b/examples/pushTokens.py new file mode 100644 index 0000000..c531f77 --- /dev/null +++ b/examples/pushTokens.py @@ -0,0 +1,60 @@ +''' +@helpdesk: SURF helpdesk + +usage: python pushTokens.py [path to tokens file] +description: + Connects to PiCaS server + Creates one token for each line in [tokens file] + Loads the tokens +''' + +import sys +import os +import couchdb +import random +import picasconfig + +def getNextIndex(): + db = get_db() + + index = 0 + while db.get(f"token_{index}") is not None: + index+=1 + + return index + +def loadTokens(db): + tokens = [] + tokensfile = sys.argv[1] + with open(tokensfile) as f: + input = f.read().splitlines() + + i = getNextIndex() + for fractal in input: + token = { + '_id': 'token_' + str(i), + 'type': 'token', + 'lock': 0, + 'done': 0, + 'hostname': '', + 'scrub_count': 0, + 'input': fractal, + 'exit_code': '' + } + tokens.append(token) + i = i +1 + db.update(tokens) + +def get_db(): + server = couchdb.Server(picasconfig.PICAS_HOST_URL) + username = picasconfig.PICAS_USERNAME + pwd = picasconfig.PICAS_PASSWORD + server.resource.credentials = (username,pwd) + db = server[picasconfig.PICAS_DATABASE] + return db + +if __name__ == '__main__': + #Create a connection to the server + db = get_db() + #Load the tokens to the database + loadTokens(db) diff --git a/examples/quickExample.txt b/examples/quickExample.txt new file mode 100644 index 0000000..b1ff1f3 --- /dev/null +++ b/examples/quickExample.txt @@ -0,0 +1,3 @@ +echo 'this is token 1' +echo 'this is token 2' +echo 'this is token 3' diff --git a/examples/slurm-example.sh b/examples/slurm-example.sh new file mode 100755 index 0000000..21b5bb1 --- /dev/null +++ b/examples/slurm-example.sh 
@@ -0,0 +1,21 @@ +#!/bin/bash +#SBATCH --array=0-5 + + +#@helpdesk: SURF helpdesk +# +#usage: sbatch slurm-example.sh +#description: +# Connect to PiCaS server +# Get the next token in todo View +# Fetch the token parameters, e.g. input value +# Run main job (process_task.sh) with the input argument +# When done, return the exit code to the token +# Attach the logs to the token + + +cd $PWD +# You need to load your environment here +# mamba activate MAMBA-ENV +# source /PATH/TO/VENV/bin/activate +python local-example.py diff --git a/examples/src/fractals.c b/examples/src/fractals.c new file mode 100644 index 0000000..d50583f --- /dev/null +++ b/examples/src/fractals.c @@ -0,0 +1,96 @@ +#include +#include +#include +#include + +#define XSIZE 3200 +#define YSIZE 3200 +#define MAXITER 1500 +#define PI 3.141592654 + + +void color(int red, int green, int blue, FILE *ofp){ + fputc((char)red, ofp); + fputc((char)green, ofp); + fputc((char)blue, ofp); +} + +int main(int argc, char *argv[]) { + + double xmin = -1.5, xmax = 1.5, ymin = -1.2, ymax = 1.2; + double deltax, deltay, x, y, x0, y0; + double p = -0.5101, q = 0.5101, radius, zr, zi; + int xsize=XSIZE; + int ysize=YSIZE; + int maxiter=MAXITER; + int c; + char default_filename[] = "fractal.ppm"; + char *filename; + char filenameSet = 0; + extern char *optarg; + extern int optopt; + + while ((c = getopt(argc, argv, "q:d:m:o:")) != -1) { + switch(c) { + case 'q': + q = strtod( optarg, NULL ); + p = q * -1.0; + break; + case 'd': + xsize = atoi(optarg); + ysize = xsize; + break; + case 'm': + maxiter = atoi(optarg); + break; + case 'o': + filename = optarg; + filenameSet = 1; + break; + case '?': + fprintf(stderr, "Unrecognized option: -%c\n", optopt); + } + } + + if ( 0 == filenameSet ) { + filename = default_filename; + } + + int color_index, xpix, ypix; + int colorred, colorgreen, colorblue; + FILE *ofp; + + ofp = fopen(filename, "w"); + fprintf(ofp,"P6\n# Y2K Compliant / Julia Set\n"); + fprintf(ofp,"%d 
%d\n255\n",xsize,ysize); + deltax = (xmax - xmin)/(xsize-1); + deltay = (ymax - ymin)/(ysize-1); + for (xpix = 0; xpix < xsize; xpix++){ + x0 = xmin + (double)xpix*deltax; + for (ypix = 0; ypix < ysize; ypix++){ + y0 = ymin + (double)ypix*deltay; + //colorred = (int)(255*sin( fabs(y0) / sqrt(y0*y0 + x0*x0) )); + //colorgreen = (int)(255*cos( fabs(x0) / sqrt(y0*y0 + x0*x0) )); + //colorblue = 0; + x = x0; y = y0; + color_index = 0; + do{ + zr = x*x-y*y + p; + zi = 2.0*x*y + q; + radius = zr*zr + zi*zi; + //colorred = (int)(255*sin( fabs(ypix) / radius )); + //colorgreen = (int)(255*cos( fabs(xpix) / radius )); + //colorblue = (int)(255*tan( fabs(xpix-ypix) / radius )); + colorred = 0; + colorgreen = (int)(x*ypix) % 255; + colorblue = (int)(y*xpix) % 255; + color_index++; + if (radius >= maxiter) color(255, 255, 255, ofp); + //if (color_index == maxiter) color(0, (int)(x*ypix) % 255, (int)(y*xpix) % 255, ofp); + if (color_index == maxiter) color( colorred, colorgreen, colorblue, ofp); + x = zr; y = zi; + } while (radius <= maxiter && color_index <= maxiter); + } + } + fclose(ofp); +} diff --git a/picas/documents.py b/picas/documents.py index c3ea855..fa63c88 100644 --- a/picas/documents.py +++ b/picas/documents.py @@ -96,7 +96,7 @@ def put_attachment(self, name, data, mimetype=None): b64data = base64.b64encode(data) self.doc['_attachments'][name] = { - 'content_type': mimetype, 'data': b64data} + 'content_type': mimetype, 'data': b64data.decode()} def get_attachment(self, name, retrieve_from_database=None): ''' Gets an attachment dict from the document. 
diff --git a/setup.py b/setup.py index ae15bba..a4e13a2 100755 --- a/setup.py +++ b/setup.py @@ -8,15 +8,15 @@ setup(name='picas', version='0.2.10', description='Python client using CouchDB as a token pool server.', - author='Jan Bot,Joris Borgdorff', - author_email='helpdesk@surfsara.nl', + author='Jan Bot,Joris Borgdorff,Lodewijk Nauta', + author_email='helpdesk@surf.nl', url='https://github.com/sara-nl/picasclient', download_url='https://github.com/sara-nl/picasclient/tarball/0.2.10', packages=['picas'], install_requires=['couchdb'], license="MIT", extras_require={ - 'test': ['flake8', 'nose'], + 'test': ['flake8', 'pytest'], }, classifiers=[ "Development Status :: 4 - Beta", diff --git a/tests/test_document.py b/tests/test_document.py index 60d8e6a..ff6513a 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -36,7 +36,7 @@ def test_attachment(): assert_equals(attach['content_type'], 'text/plain') assert_equals(attach['data'], data) assert_equals(doc['_attachments']['mytext.txt']['data'], - b'VGhpcyBpcyBpdA==') + 'VGhpcyBpcyBpdA==') doc.remove_attachment('mytext.txt') assert_true('mytext.txt' not in doc['_attachments']) assert_equals(attach['data'], data)