From 39e521def084b0f5c3a1f1f0fa346e47b2b2f6ad Mon Sep 17 00:00:00 2001 From: Franco Fichtner Date: Mon, 28 Mar 2022 12:42:29 +0200 Subject: [PATCH] */*: sync with upstream Taken from: FreeBSD --- accessibility/caribou/Makefile | 2 +- archivers/file-roller/Makefile | 2 +- archivers/py-bup/Makefile | 6 +- archivers/py-bup/distinfo | 6 +- archivers/py-bup/files/patch-Makefile | 12 +- archivers/py-bup/files/patch-config_configure | 15 +- ...es.sh => patch-test_ext_test-sparse-files} | 6 +- archivers/py-zopfli/Makefile | 8 +- archivers/py-zopfli/distinfo | 6 +- archivers/xar/Makefile | 1 + archivers/xarchiver/Makefile | 1 + astro/foxtrotgps/Makefile | 2 +- astro/gpscorrelate/Makefile | 2 +- astro/kosmorro/Makefile | 17 +- astro/kosmorro/distinfo | 6 +- astro/kosmorro/pkg-plist | 145 +- astro/py-kosmorrolib/Makefile | 2 +- astro/py-kosmorrolib/distinfo | 6 +- astro/py-pysofa/files/patch-2to3 | 182 + astro/py-ro/files/patch-2to3 | 191 + audio/abgate-lv2/Makefile | 2 +- audio/amarok/Makefile | 2 +- audio/aqualung/Makefile | 2 +- audio/ardour6/Makefile | 2 +- audio/ario/Makefile | 1 + audio/denemo/Makefile | 2 +- audio/easytag/Makefile | 2 +- audio/eq10q-lv2/Makefile | 2 +- audio/ezstream/Makefile | 2 +- audio/fluidsynth/Makefile | 2 +- audio/fluidsynth/distinfo | 6 +- audio/fluidsynth/pkg-plist | 2 +- audio/forked-daapd/Makefile | 2 +- audio/ganv/Makefile | 1 + audio/gbemol/Makefile | 2 +- audio/glurp/Makefile | 2 +- audio/gmpc-discogs/Makefile | 2 +- audio/gmpc-extraplaylist/Makefile | 2 +- audio/gmpc-lastfm/Makefile | 2 +- audio/gmpc-lyrics/Makefile | 2 +- audio/gmpc-lyricsplugin/Makefile | 2 +- audio/gmpc-magnatune/Makefile | 2 +- audio/gmpc-mdcover/Makefile | 2 +- audio/gmpc-mserver/Makefile | 2 +- audio/gmpc-shout/Makefile | 2 +- audio/gmpc/Makefile | 2 +- audio/gmtp/Makefile | 1 + audio/gnaural/Makefile | 2 +- audio/gsequencer/Makefile | 1 + audio/gstreamer1-plugins-a52dec/Makefile | 2 +- audio/gstreamer1-plugins-amrnb/Makefile | 2 +- 
audio/gstreamer1-plugins-amrwbdec/Makefile | 2 +- audio/gstreamer1-plugins-bs2b/Makefile | 2 +- audio/gstreamer1-plugins-cdparanoia/Makefile | 2 +- audio/gstreamer1-plugins-chromaprint/Makefile | 2 +- audio/gstreamer1-plugins-faac/Makefile | 2 +- audio/gstreamer1-plugins-faad/Makefile | 2 +- audio/gstreamer1-plugins-flac/Makefile | 2 +- audio/gstreamer1-plugins-flite/Makefile | 2 +- audio/gstreamer1-plugins-gme/Makefile | 2 +- audio/gstreamer1-plugins-gsm/Makefile | 2 +- audio/gstreamer1-plugins-jack/Makefile | 2 +- audio/gstreamer1-plugins-ladspa/Makefile | 2 +- audio/gstreamer1-plugins-lame/Makefile | 2 +- audio/gstreamer1-plugins-lv2/Makefile | 2 +- audio/gstreamer1-plugins-modplug/Makefile | 2 +- audio/gstreamer1-plugins-mpg123/Makefile | 2 +- audio/gstreamer1-plugins-musepack/Makefile | 2 +- audio/gstreamer1-plugins-ogg/Makefile | 2 +- audio/gstreamer1-plugins-openmpt/Makefile | 2 +- audio/gstreamer1-plugins-opus/Makefile | 2 +- audio/gstreamer1-plugins-pulse/Makefile | 2 +- audio/gstreamer1-plugins-shout2/Makefile | 2 +- audio/gstreamer1-plugins-sidplay/Makefile | 2 +- audio/gstreamer1-plugins-sndfile/Makefile | 2 +- audio/gstreamer1-plugins-soundtouch/Makefile | 2 +- audio/gstreamer1-plugins-speex/Makefile | 2 +- audio/gstreamer1-plugins-taglib/Makefile | 2 +- audio/gstreamer1-plugins-twolame/Makefile | 2 +- audio/gstreamer1-plugins-vorbis/Makefile | 2 +- audio/gstreamer1-plugins-wavpack/Makefile | 2 +- audio/gstreamer1-plugins-webrtcdsp/Makefile | 2 +- audio/gtkguitune/Makefile | 2 +- audio/gtkpod/Makefile | 2 +- audio/guitarix-lv2/Makefile | 1 + audio/icecast-kh/Makefile | 1 + audio/icecast/Makefile | 3 +- audio/icecast/files/patch-configure | 410 ++ audio/ices/Makefile | 1 + audio/jalv-select/Makefile | 1 + audio/jalv/Makefile | 2 +- audio/kid3-qt5/Makefile | 2 +- audio/lame/Makefile | 9 +- audio/lash/Makefile | 2 +- audio/libgpod-sharp/Makefile | 1 + audio/libgpod/Makefile | 2 +- audio/libmusicbrainz5/Makefile | 1 + audio/mate-media/Makefile | 1 + 
audio/muse-sequencer/Makefile | 2 +- audio/paprefs/Makefile | 1 + audio/pavucontrol/Makefile | 1 + audio/plasma5-plasma-pa/Makefile | 1 + audio/psindustrializer/Makefile | 2 +- audio/pulseeffects/Makefile | 2 +- audio/py-apetag/files/patch-2to3 | 412 ++ audio/py-mpd/files/patch-2to3 | 11 + audio/qtractor/Makefile | 2 +- audio/rhythmbox/Makefile | 2 +- audio/tagtool/Makefile | 2 +- audio/vst3sdk/Makefile | 1 + audio/zrythm/Makefile | 2 +- benchmarks/py-naarad/files/patch-2to3 | 9 + biology/checkm/Makefile | 2 +- biology/checkm/files/patch-2to3 | 648 +++ biology/groopm/files/patch-2to3 | 3414 +++++++++++++ biology/libnuml/Makefile | 1 + biology/libsbml/Makefile | 1 + biology/libsedml/Makefile | 1 + biology/ncbi-vdb/Makefile | 2 +- biology/py-crossmap/files/patch-2to3 | 2955 ++++++++++++ biology/py-ont-fast5-api/Makefile | 4 +- biology/py-ont-fast5-api/distinfo | 6 +- biology/py-ont-fast5-api/files/patch-setup.py | 11 + biology/python-nexus/files/patch-2to3 | 19 + biology/sra-tools/Makefile | 1 + cad/horizon-eda/Makefile | 1 + cad/ifcopenshell/Makefile | 2 +- cad/opencascade/Makefile | 2 +- cad/openscad-devel/Makefile | 2 +- cad/openscad/Makefile | 2 +- cad/openvsp/Makefile | 1 + cad/oregano/Makefile | 2 +- cad/repsnapper/Makefile | 2 +- cad/solvespace/Makefile | 1 + cad/veroroute/Makefile | 2 +- cad/veroroute/distinfo | 6 +- cad/veroroute/pkg-plist | 3 + chinese/fcitx/Makefile | 1 + comms/gnocky/Makefile | 2 +- comms/gstreamer1-plugins-spandsp/Makefile | 2 +- comms/hamlib/Makefile | 1 + comms/py-esptool/Makefile | 6 +- comms/py-esptool/distinfo | 6 +- comms/xastir/Makefile | 1 + converters/p5-Text-Iconv/Makefile | 5 +- converters/p5-Unicode-String/Makefile | 4 + converters/p5-Unicode-UTF8simple/Makefile | 2 + converters/wkhtmltopdf/files/patch-configure | 2 +- converters/xml2c/Makefile | 1 + databases/akonadi/Makefile | 1 + databases/buzhug/Makefile | 2 +- databases/buzhug/files/patch-2to3 | 1032 ++++ databases/evolution-data-server/Makefile | 1 + 
databases/gmdb2/Makefile | 1 + databases/kbibtex/Makefile | 2 +- databases/libgda5-bdb/Makefile | 2 +- databases/libgda5-jdbc/Makefile | 2 +- databases/libgda5-ldap/Makefile | 2 +- databases/libgda5-mdb/Makefile | 2 +- databases/libgda5-mysql/Makefile | 2 +- databases/libgda5-postgresql/Makefile | 2 +- databases/libgda5-ui/Makefile | 2 +- databases/libgda5/Makefile | 1 + databases/libgdamm5/Makefile | 1 + databases/mariadb103-server/Makefile | 2 +- databases/mariadb104-server/Makefile | 2 +- databases/mariadb105-server/Makefile | 2 +- databases/mariadb106-server/Makefile | 2 +- databases/mydumper/Makefile | 3 +- databases/mydumper/distinfo | 6 +- databases/mydumper/pkg-descr | 16 +- databases/p5-Search-Xapian/Makefile | 2 +- databases/p5-Search-Xapian/distinfo | 6 +- databases/pg_citus/Makefile | 2 +- databases/pg_citus/distinfo | 6 +- databases/pgadmin3/Makefile | 2 +- databases/pgbackrest/Makefile | 1 + databases/pgbarman/Makefile | 2 +- databases/pgbarman/distinfo | 10 +- databases/pgmodeler/Makefile | 1 + databases/pointcloud/Makefile | 2 +- databases/postgis30/Makefile | 1 + databases/postgis31/Makefile | 1 + databases/postgis32/Makefile | 1 + databases/postgresql10-contrib/Makefile | 2 +- databases/postgresql11-contrib/Makefile | 2 +- databases/postgresql12-contrib/Makefile | 2 +- databases/postgresql13-contrib/Makefile | 2 +- databases/postgresql14-contrib/Makefile | 2 +- databases/py-Elixir/files/patch-2to3 | 190 + databases/py-dbf/Makefile | 2 +- databases/py-dbf/files/patch-2to3 | 13 + databases/py-geoalchemy2/Makefile | 2 +- databases/py-geoalchemy2/distinfo | 6 +- databases/py-marshmallow-sqlalchemy/Makefile | 4 +- databases/py-marshmallow-sqlalchemy/distinfo | 6 +- databases/py-motor/files/patch-asyncio | 16 + databases/py-mycli/Makefile | 1 + databases/py-mycli/files/patch-setup.py | 9 + databases/py-pgspecial/Makefile | 2 +- databases/py-pgspecial/distinfo | 6 +- databases/py-psycopg-c/Makefile | 2 +- databases/py-psycopg-c/distinfo | 6 +- 
databases/py-psycopg/Makefile | 2 +- databases/py-psycopg/distinfo | 6 +- databases/py-sqlalchemy14/Makefile | 2 +- databases/py-sqlalchemy14/distinfo | 6 +- databases/py-sqlalchemy14/pkg-plist | 3 +- databases/py-sqlobject/Makefile | 2 +- databases/py-sqlobject/files/patch-2to3 | 2505 ++++++++++ databases/py-tiledb/Makefile | 6 +- databases/py-tiledb/distinfo | 6 +- databases/py-zodbpickle/files/patch-2to3 | 1029 ++++ databases/qof/Makefile | 2 +- databases/rocksdb/Makefile | 8 +- databases/rocksdb/distinfo | 6 +- databases/rocksdb/files/patch-Makefile | 8 +- databases/rocksdb/pkg-plist | 8 +- databases/rubygem-activemodel52/Makefile | 2 +- databases/rubygem-activemodel52/distinfo | 6 +- databases/rubygem-activemodel60/Makefile | 2 +- databases/rubygem-activemodel60/distinfo | 6 +- databases/rubygem-activemodel61/Makefile | 2 +- databases/rubygem-activemodel61/distinfo | 6 +- databases/rubygem-activemodel70/Makefile | 2 +- databases/rubygem-activemodel70/distinfo | 6 +- databases/rubygem-activerecord52/Makefile | 2 +- databases/rubygem-activerecord52/distinfo | 6 +- databases/rubygem-activerecord60/Makefile | 2 +- databases/rubygem-activerecord60/distinfo | 6 +- databases/rubygem-activerecord61/Makefile | 2 +- databases/rubygem-activerecord61/distinfo | 6 +- databases/rubygem-activerecord70/Makefile | 2 +- databases/rubygem-activerecord70/distinfo | 6 +- databases/sequeler/Makefile | 2 +- databases/spatialite-tools/Makefile | 1 + databases/spatialite/Makefile | 1 + databases/spatialite_gui/Makefile | 1 + databases/sqlitebrowser/Makefile | 2 +- databases/sqliteodbc/Makefile | 2 +- databases/tiledb/Makefile | 2 +- databases/tiledb/distinfo | 6 +- databases/virtuoso/Makefile | 2 +- databases/xrootd/Makefile | 2 +- deskutils/alarm-clock-applet/Makefile | 1 + deskutils/bijiben/Makefile | 2 +- deskutils/bookworm/Makefile | 2 +- deskutils/cairo-dock-plugins/Makefile | 2 +- deskutils/cairo-dock/Makefile | 2 +- deskutils/easystroke/Makefile | 2 +- 
deskutils/elementary-calendar/Makefile | 1 + deskutils/genius/Makefile | 2 +- deskutils/gnome-calendar/Makefile | 1 + deskutils/gnome-contacts/Makefile | 1 + deskutils/gnome-dictionary/Makefile | 1 + deskutils/gnome-documents/Makefile | 2 +- deskutils/gnome-initial-setup/Makefile | 3 +- deskutils/gnome-initial-setup/pkg-plist | 76 + deskutils/gnome-maps/Makefile | 1 + deskutils/gnome-photos/Makefile | 1 + deskutils/gnome-todo/Makefile | 1 + deskutils/gnome-tweaks/Makefile | 2 +- deskutils/gnote/Makefile | 1 + deskutils/growl-for-linux/Makefile | 2 +- deskutils/gxneur/Makefile | 1 + deskutils/lookbook/Makefile | 2 +- deskutils/notekit/Makefile | 1 + deskutils/osmo/Makefile | 2 +- deskutils/pinot/Makefile | 1 + deskutils/py-autokey/Makefile | 2 +- deskutils/py-py3status/Makefile | 2 +- deskutils/py-py3status/distinfo | 6 +- deskutils/py-pystash/files/patch-2to3 | 50 + deskutils/recoll/Makefile | 1 + deskutils/rubrica/Makefile | 1 + deskutils/synapse/Makefile | 1 + deskutils/sysctlview/Makefile | 1 + deskutils/taskwarrior/Makefile | 2 +- deskutils/taskwarrior/distinfo | 6 +- deskutils/virt-manager/Makefile | 1 + deskutils/xfce4-tumbler/Makefile | 2 +- deskutils/xpad/Makefile | 1 + devel/Makefile | 13 +- devel/R-cran-Rdpack/Makefile | 2 +- devel/R-cran-Rdpack/distinfo | 6 +- devel/R-cran-fansi/Makefile | 2 +- devel/R-cran-fansi/distinfo | 6 +- devel/R-cran-plyr/Makefile | 9 +- devel/R-cran-plyr/distinfo | 6 +- devel/aegis/Makefile | 2 +- devel/anjuta/Makefile | 2 +- devel/appstream/Makefile | 1 + devel/autogen/Makefile | 2 +- devel/caf/Makefile | 2 +- devel/caf/distinfo | 6 +- devel/caf/pkg-plist | 15 +- devel/codeville/Makefile | 2 +- devel/codeville/files/patch-indent | 75 + devel/collada-dom/Makefile | 2 +- devel/compiz-bcop/Makefile | 1 + devel/csoap/Makefile | 2 +- devel/dconf/Makefile | 2 +- devel/dia2code+/Makefile | 2 +- devel/dia2code/Makefile | 1 + devel/efl/Makefile | 31 +- devel/electron13/Makefile | 1 + devel/flang-clang/Makefile | 1 + 
devel/frama-c/Makefile | 1 + devel/gconf2/Makefile | 2 +- devel/gconfmm26/Makefile | 2 +- devel/geany-plugin-pretty-printer/Makefile | 1 + devel/ghub/Makefile | 3 +- devel/ghub/distinfo | 6 +- devel/gitaly/Makefile | 9 +- devel/gitaly/distinfo | 10 +- devel/gitaly/files/patch-Makefile | 8 +- devel/gitg/Makefile | 2 +- devel/gnome-builder/Makefile | 1 + devel/goffice/Makefile | 1 + devel/gstreamer1-plugins-soup/Makefile | 2 +- devel/gtranslator/Makefile | 1 + devel/gvfs/Makefile | 1 + devel/hwloc/Makefile | 1 + devel/hwloc2/Makefile | 1 + devel/inih/Makefile | 5 +- devel/inih/distinfo | 6 +- devel/kf5-kdoctools/Makefile | 1 + devel/kf5-kio/Makefile | 1 + devel/lazygit/Makefile | 4 +- devel/lazygit/distinfo | 10 +- devel/lfcbase/Makefile | 2 +- devel/lfcbase/distinfo | 6 +- devel/libabigail/Makefile | 1 + devel/libaravis/Makefile | 1 + devel/libassetml/Makefile | 2 +- devel/libdap/Makefile | 1 + devel/libdbusmenu/Makefile | 2 +- devel/libgdata/Makefile | 2 +- devel/libght/Makefile | 2 +- devel/libglade2/Makefile | 2 +- devel/libglademm24/Makefile | 2 +- devel/libgnt/Makefile | 1 + devel/libgsf/Makefile | 2 +- devel/libical/Makefile | 2 +- devel/libiqxmlrpc/Makefile | 2 +- devel/liblangtag/Makefile | 1 + devel/libosinfo/Makefile | 1 + devel/libpafe-ruby/files/patch-extconf.rb | 11 + devel/libplist/Makefile | 2 +- devel/libqb/Makefile | 1 + devel/librcc/Makefile | 2 +- devel/librest/Makefile | 1 + devel/libsigrokdecode/Makefile | 4 +- devel/libsigrokdecode/files/patch-configure | 44 + devel/libsmpp34/Makefile | 1 + devel/libsoup/Makefile | 1 + devel/libsoup3/Makefile | 2 +- devel/libvirt-glib/Makefile | 1 + devel/libvirt/Makefile | 1 + devel/llvm-cheri/Makefile | 2 +- devel/llvm-devel/Makefile | 2 +- devel/llvm10/Makefile | 2 +- devel/llvm11/Makefile | 2 +- devel/llvm12/Makefile | 2 +- devel/llvm13/Makefile | 2 +- devel/llvm14/Makefile | 13 +- devel/llvm14/distinfo | 6 +- devel/llvm70/Makefile | 2 +- devel/llvm80/Makefile | 2 +- devel/llvm90/Makefile | 2 +- 
devel/m17n-lib/Makefile | 2 +- devel/msitools/Makefile | 1 + devel/ocaml-sdl/files/patch-src_Makefile | 20 + devel/ocaml-sdl/files/patch-src_sdlmouse.ml | 17 + devel/p5-Alien-Build/Makefile | 2 +- devel/p5-Alien-Build/distinfo | 6 +- devel/p5-Alien-Build/pkg-plist | 2 + devel/p5-App-cpanminus/Makefile | 2 +- devel/p5-App-cpanminus/distinfo | 6 +- devel/p5-CPAN-Perl-Releases/Makefile | 2 +- devel/p5-CPAN-Perl-Releases/distinfo | 6 +- devel/p5-Class-MethodMaker/Makefile | 4 + devel/p5-Devel-MAT-Dumper/Makefile | 2 +- devel/p5-Devel-MAT-Dumper/distinfo | 6 +- devel/p5-Devel-MAT/Makefile | 2 +- devel/p5-Devel-MAT/distinfo | 6 +- devel/p5-Devel-MAT/pkg-plist | 1 + devel/p5-Devel-PPPort/Makefile | 2 +- devel/p5-Devel-PPPort/distinfo | 6 +- devel/p5-Exporter-Lite/Makefile | 3 +- devel/p5-File-MimeInfo/Makefile | 2 +- devel/p5-File-MimeInfo/distinfo | 6 +- devel/p5-Inline-C/Makefile | 4 +- devel/p5-Inline-C/distinfo | 6 +- devel/p5-Log-TraceMessages/Makefile | 2 + devel/p5-Module-Load-Util/Makefile | 2 +- devel/p5-Module-Load-Util/distinfo | 6 +- devel/p5-No-Worries/Makefile | 2 +- devel/p5-Object-Pad/Makefile | 4 +- devel/p5-Object-Pad/distinfo | 6 +- devel/p5-Ouch/Makefile | 6 +- devel/p5-Test-Time/Makefile | 6 + devel/p5-Test2-Harness-UI/Makefile | 2 +- devel/p5-Test2-Harness-UI/distinfo | 6 +- devel/p5-Test2-Harness-UI/pkg-plist | 7 + devel/p5-Test2-Harness/Makefile | 2 +- devel/p5-Test2-Harness/distinfo | 6 +- devel/pecl-grpc/Makefile | 2 +- devel/pecl-grpc/distinfo | 6 +- devel/phpunit9/Makefile | 2 +- devel/phpunit9/distinfo | 6 +- devel/py-DateTime/files/patch-2to3 | 526 ++ .../files/patch-2to3 | 64 + devel/py-Products.PloneLanguageTool/Makefile | 5 +- devel/py-aiosignal/Makefile | 22 + devel/py-aiosignal/distinfo | 3 + devel/py-aiosignal/pkg-descr | 12 + devel/py-anyjson/files/patch-2to3 | 44 + .../py-apache_conf_parser/files/patch-indent | 13 + devel/py-astroid/Makefile | 4 +- devel/py-astroid/distinfo | 6 +- devel/py-asyncio/Makefile | 22 - 
devel/py-asyncio/distinfo | 2 - devel/py-asyncio/pkg-descr | 3 - devel/py-azure-core/Makefile | 5 +- devel/py-azure-core/distinfo | 6 +- devel/py-behave/files/patch-setup.py | 11 + devel/py-cachy/Makefile | 1 + devel/py-cachy/files/patch-setup.py | 12 +- devel/py-canonicaljson/Makefile | 12 +- devel/py-canonicaljson/distinfo | 6 +- devel/py-cdg/files/patch-2to3 | 16 + devel/py-cliff/Makefile | 2 +- devel/py-cliff/distinfo | 6 +- devel/py-cloudpickle/Makefile | 14 +- devel/py-cloudpickle/distinfo | 6 +- devel/py-cloudpickle/pkg-descr | 18 +- devel/py-d2to1/Makefile | 2 + devel/py-daemon-runner/files/patch-2to3 | 11 + devel/py-dal/files/patch-2to3 | 604 +++ devel/py-dask/Makefile | 4 +- devel/py-dask/distinfo | 6 +- devel/py-datadog/Makefile | 7 +- devel/py-datadog/distinfo | 6 +- devel/py-datadog/files/setup.py | 53 + devel/py-ddtrace/Makefile | 2 +- devel/py-ddtrace/distinfo | 6 +- devel/py-distributed/Makefile | 6 +- devel/py-distributed/distinfo | 6 +- .../files/patch-requirements.txt | 9 - devel/py-epc/Makefile | 17 + devel/py-epc/distinfo | 3 + devel/py-epc/pkg-descr | 6 + devel/py-executing/Makefile | 2 +- devel/py-executing/distinfo | 6 +- devel/py-extremes/files/patch-2to3 | 11 + devel/py-extremes/files/patch-setup.py | 4 +- devel/py-flatbuffers/Makefile | 24 +- devel/py-flatbuffers/distinfo | 6 +- devel/py-flatbuffers/pkg-descr | 6 + devel/py-flexmock/Makefile | 2 +- devel/py-flexmock/distinfo | 6 +- devel/py-frozenlist/Makefile | 13 +- devel/py-frozenlist/distinfo | 6 +- devel/py-fsspec/Makefile | 4 +- devel/py-fsspec/distinfo | 6 +- devel/py-google-cloud-iam/Makefile | 6 +- devel/py-google-cloud-iam/distinfo | 6 +- devel/py-grizzled/Makefile | 5 +- devel/py-grizzled/files/patch-2to3 | 1595 +++++++ devel/py-hash_ring/files/patch-2to3 | 12 + devel/py-installer/Makefile | 2 +- devel/py-installer/distinfo | 6 +- devel/py-installer/pkg-descr | 2 +- devel/py-jupyter-core/Makefile | 2 +- devel/py-jupyter-core/distinfo | 6 +- devel/py-keystoneauth1/Makefile | 2 
+- devel/py-keystoneauth1/distinfo | 6 +- devel/py-libusb1/Makefile | 6 +- devel/py-libusb1/distinfo | 6 +- .../files/patch-usb1_____init____.py | 4 +- devel/py-marshmallow/Makefile | 7 +- devel/py-marshmallow/distinfo | 6 +- devel/py-minimongo/files/patch-2to3 | 29 + devel/py-mongokit/files/patch-2to3 | 272 ++ devel/py-moto/Makefile | 2 +- devel/py-moto/distinfo | 6 +- devel/py-nbclassic/Makefile | 5 +- devel/py-nbclassic/distinfo | 6 +- devel/py-nbclient/Makefile | 4 +- devel/py-nbclient/distinfo | 6 +- devel/py-nbconvert/Makefile | 5 +- devel/py-nbconvert/distinfo | 6 +- devel/py-nbformat/Makefile | 7 +- devel/py-nbformat/distinfo | 6 +- devel/py-notebook-shim/Makefile | 23 + devel/py-notebook-shim/distinfo | 3 + devel/py-notebook-shim/pkg-descr | 4 + devel/py-omnijson/files/patch-2to3 | 209 + devel/py-optik/files/patch-2to3 | 352 ++ devel/py-ordereddict/Makefile | 2 + devel/py-os-brick/Makefile | 2 +- .../py-os-brick/files/patch-requirements.txt | 11 - devel/py-pastel/Makefile | 6 + devel/py-pastel/files/patch-setup.py | 11 + devel/py-path/Makefile | 5 +- devel/py-path/distinfo | 6 +- devel/py-path/files/setup.py | 3 + devel/py-pdm-pep517/Makefile | 2 +- devel/py-pdm-pep517/distinfo | 6 +- devel/py-pdm-pep517/files/setup.py | 4 +- devel/py-pdm/Makefile | 4 +- devel/py-pdm/distinfo | 6 +- devel/py-pdm/files/setup.py | 2 +- devel/py-pip-api/Makefile | 2 +- devel/py-pip-api/distinfo | 6 +- devel/py-plex/files/patch-2to3 | 517 ++ devel/py-poetry-core/Makefile | 2 +- devel/py-poetry-core/distinfo | 6 +- devel/py-prettytable/Makefile | 2 +- devel/py-prettytable/distinfo | 6 +- devel/py-proto-plus/Makefile | 2 +- devel/py-proto-plus/distinfo | 6 +- devel/py-pycalendar/files/patch-2to3 | 48 + devel/py-pydevd/files/patch-2to3 | 827 ++++ devel/py-pygpx/files/patch-2to3 | 44 + devel/py-pyrepl/files/patch-2to3 | 703 +++ devel/py-pytest-flake8/Makefile | 4 +- devel/py-pytest-flake8/distinfo | 6 +- devel/py-pytest-subtests/Makefile | 23 + devel/py-pytest-subtests/distinfo | 
3 + devel/py-pytest-subtests/pkg-descr | 4 + devel/py-python-application/files/patch-2to3 | 1117 +++++ devel/py-python-magic/Makefile | 15 +- devel/py-python-magic/distinfo | 6 +- devel/py-python-magic/pkg-descr | 7 +- devel/py-pyzipper/Makefile | 3 + devel/py-qt5-pyqt/Makefile | 1 + devel/py-rapidfuzz/Makefile | 2 + devel/py-readme-renderer/Makefile | 4 +- devel/py-readme-renderer/distinfo | 6 +- devel/py-responses/Makefile | 5 +- devel/py-responses/distinfo | 6 +- devel/py-simpleparse/files/patch-setup.py | 11 + devel/py-simpletal/files/patch-2to3 | 811 ++++ devel/py-stsci.distutils/files/patch-2to3 | 144 + devel/py-testoob/Makefile | 2 +- devel/py-testoob/files/patch-2to3 | 1689 +++++++ devel/py-types-docutils/Makefile | 2 +- devel/py-types-docutils/distinfo | 6 +- devel/py-urlimport/files/patch-2to3 | 68 + devel/py-userpath/Makefile | 1 + devel/py-userpath/files/setup.py | 1 - devel/py-wrapt/Makefile | 2 +- devel/py-wrapt/distinfo | 6 +- devel/py-xarray/Makefile | 2 +- devel/py-xarray/distinfo | 6 +- devel/py-yapps2/files/patch-2to3 | 162 + devel/py-z3c.autoinclude/files/patch-2to3 | 11 + devel/py-zict/Makefile | 2 +- devel/py-zict/distinfo | 6 +- .../py-zope.cachedescriptors/files/patch-2to3 | 11 + devel/py-zope.contenttype/files/patch-2to3 | 31 + devel/py-zope.datetime/files/patch-2to3 | 167 + devel/py-zope.deferredimport/files/patch-2to3 | 55 + devel/py-zope.dottedname/Makefile | 2 + devel/py-zope.generations/files/patch-2to3 | 22 + devel/py-zope.i18n/files/patch-2to3 | 529 ++ devel/py-zope.processlifetime/Makefile | 2 + devel/py-zope.sequencesort/files/patch-2to3 | 45 + devel/py-zope.size/files/patch-2to3 | 78 + devel/qscintilla2-qt5/distinfo | 6 +- devel/regexxer/Makefile | 2 +- devel/rkcommon/Makefile | 6 + devel/rubygem-actionview52/Makefile | 2 +- devel/rubygem-actionview52/distinfo | 6 +- devel/rubygem-actionview60/Makefile | 2 +- devel/rubygem-actionview60/distinfo | 6 +- devel/rubygem-actionview61/Makefile | 2 +- devel/rubygem-actionview61/distinfo | 
6 +- devel/rubygem-actionview70/Makefile | 2 +- devel/rubygem-actionview70/distinfo | 6 +- devel/rubygem-activejob52/Makefile | 2 +- devel/rubygem-activejob52/distinfo | 6 +- devel/rubygem-activejob60/Makefile | 2 +- devel/rubygem-activejob60/distinfo | 6 +- devel/rubygem-activejob61/Makefile | 2 +- devel/rubygem-activejob61/distinfo | 6 +- devel/rubygem-activejob70/Makefile | 2 +- devel/rubygem-activejob70/distinfo | 6 +- devel/rubygem-activesupport52/Makefile | 2 +- devel/rubygem-activesupport52/distinfo | 6 +- devel/rubygem-activesupport60/Makefile | 2 +- devel/rubygem-activesupport60/distinfo | 6 +- devel/rubygem-activesupport61/Makefile | 2 +- devel/rubygem-activesupport61/distinfo | 6 +- devel/rubygem-activesupport70/Makefile | 2 +- devel/rubygem-activesupport70/distinfo | 6 +- devel/rubygem-async-io/Makefile | 2 +- devel/rubygem-async-io/distinfo | 6 +- devel/rubygem-aws-partitions/Makefile | 2 +- devel/rubygem-aws-partitions/distinfo | 6 +- devel/rubygem-aws-sdk-amplify/Makefile | 2 +- devel/rubygem-aws-sdk-amplify/distinfo | 6 +- .../rubygem-aws-sdk-amplifyuibuilder/Makefile | 2 +- .../rubygem-aws-sdk-amplifyuibuilder/distinfo | 6 +- devel/rubygem-aws-sdk-appflow/Makefile | 2 +- devel/rubygem-aws-sdk-appflow/distinfo | 6 +- devel/rubygem-aws-sdk-appregistry/Makefile | 2 +- devel/rubygem-aws-sdk-appregistry/distinfo | 6 +- devel/rubygem-aws-sdk-athena/Makefile | 2 +- devel/rubygem-aws-sdk-athena/distinfo | 6 +- devel/rubygem-aws-sdk-chime/Makefile | 2 +- devel/rubygem-aws-sdk-chime/distinfo | 6 +- .../rubygem-aws-sdk-chimesdkmeetings/Makefile | 2 +- .../rubygem-aws-sdk-chimesdkmeetings/distinfo | 6 +- devel/rubygem-aws-sdk-cloudtrail/Makefile | 2 +- devel/rubygem-aws-sdk-cloudtrail/distinfo | 6 +- devel/rubygem-aws-sdk-comprehend/Makefile | 2 +- devel/rubygem-aws-sdk-comprehend/distinfo | 6 +- devel/rubygem-aws-sdk-connect/Makefile | 2 +- devel/rubygem-aws-sdk-connect/distinfo | 6 +- devel/rubygem-aws-sdk-core/Makefile | 2 +- 
devel/rubygem-aws-sdk-core/distinfo | 6 +- devel/rubygem-aws-sdk-devopsguru/Makefile | 2 +- devel/rubygem-aws-sdk-devopsguru/distinfo | 6 +- devel/rubygem-aws-sdk-ec2/Makefile | 4 +- devel/rubygem-aws-sdk-ec2/distinfo | 6 +- devel/rubygem-aws-sdk-ecr/Makefile | 2 +- devel/rubygem-aws-sdk-ecr/distinfo | 6 +- devel/rubygem-aws-sdk-ecs/Makefile | 2 +- devel/rubygem-aws-sdk-ecs/distinfo | 6 +- devel/rubygem-aws-sdk-eks/Makefile | 2 +- devel/rubygem-aws-sdk-eks/distinfo | 6 +- devel/rubygem-aws-sdk-elasticache/Makefile | 2 +- devel/rubygem-aws-sdk-elasticache/distinfo | 6 +- devel/rubygem-aws-sdk-finspacedata/Makefile | 2 +- devel/rubygem-aws-sdk-finspacedata/distinfo | 6 +- devel/rubygem-aws-sdk-fis/Makefile | 2 +- devel/rubygem-aws-sdk-fis/distinfo | 6 +- devel/rubygem-aws-sdk-fsx/Makefile | 2 +- devel/rubygem-aws-sdk-fsx/distinfo | 6 +- devel/rubygem-aws-sdk-gamelift/Makefile | 2 +- devel/rubygem-aws-sdk-gamelift/distinfo | 6 +- devel/rubygem-aws-sdk-greengrassv2/Makefile | 2 +- devel/rubygem-aws-sdk-greengrassv2/distinfo | 6 +- devel/rubygem-aws-sdk-kafkaconnect/Makefile | 2 +- devel/rubygem-aws-sdk-kafkaconnect/distinfo | 6 +- devel/rubygem-aws-sdk-kendra/Makefile | 2 +- devel/rubygem-aws-sdk-kendra/distinfo | 6 +- devel/rubygem-aws-sdk-keyspaces/Makefile | 22 + devel/rubygem-aws-sdk-keyspaces/distinfo | 3 + devel/rubygem-aws-sdk-keyspaces/pkg-descr | 4 + devel/rubygem-aws-sdk-lambda/Makefile | 2 +- devel/rubygem-aws-sdk-lambda/distinfo | 6 +- devel/rubygem-aws-sdk-lexmodelsv2/Makefile | 2 +- devel/rubygem-aws-sdk-lexmodelsv2/distinfo | 6 +- devel/rubygem-aws-sdk-macie/Makefile | 2 +- devel/rubygem-aws-sdk-macie/distinfo | 6 +- devel/rubygem-aws-sdk-mediaconvert/Makefile | 2 +- devel/rubygem-aws-sdk-mediaconvert/distinfo | 6 +- devel/rubygem-aws-sdk-mediapackage/Makefile | 2 +- devel/rubygem-aws-sdk-mediapackage/distinfo | 6 +- devel/rubygem-aws-sdk-mgn/Makefile | 2 +- devel/rubygem-aws-sdk-mgn/distinfo | 6 +- .../Makefile | 2 +- .../distinfo | 6 +- 
devel/rubygem-aws-sdk-outposts/Makefile | 2 +- devel/rubygem-aws-sdk-outposts/distinfo | 6 +- devel/rubygem-aws-sdk-panorama/Makefile | 2 +- devel/rubygem-aws-sdk-panorama/distinfo | 6 +- devel/rubygem-aws-sdk-rds/Makefile | 2 +- devel/rubygem-aws-sdk-rds/distinfo | 6 +- devel/rubygem-aws-sdk-resources/Makefile | 3 +- devel/rubygem-aws-sdk-resources/distinfo | 6 +- .../Makefile | 2 +- .../distinfo | 6 +- devel/rubygem-aws-sdk-secretsmanager/Makefile | 2 +- devel/rubygem-aws-sdk-secretsmanager/distinfo | 6 +- devel/rubygem-aws-sdk-synthetics/Makefile | 2 +- devel/rubygem-aws-sdk-synthetics/distinfo | 6 +- .../rubygem-aws-sdk-timestreamquery/Makefile | 2 +- .../rubygem-aws-sdk-timestreamquery/distinfo | 6 +- .../Makefile | 2 +- .../distinfo | 6 +- .../Makefile | 2 +- .../distinfo | 6 +- devel/rubygem-aws-sdk-transfer/Makefile | 2 +- devel/rubygem-aws-sdk-transfer/distinfo | 6 +- devel/rubygem-bootsnap/Makefile | 2 +- devel/rubygem-bootsnap/distinfo | 6 +- devel/rubygem-cucumber-rails/Makefile | 4 +- devel/rubygem-cucumber-rails/distinfo | 6 +- devel/rubygem-error_highlight/Makefile | 24 + devel/rubygem-error_highlight/distinfo | 3 + devel/rubygem-error_highlight/pkg-descr | 7 + devel/rubygem-et-orbi/Makefile | 2 +- devel/rubygem-et-orbi/distinfo | 6 +- devel/rubygem-faker/Makefile | 8 +- devel/rubygem-faker/distinfo | 6 +- devel/rubygem-fog-core/Makefile | 4 +- devel/rubygem-fog-core/distinfo | 6 +- devel/rubygem-google-apis-compute_v1/Makefile | 2 +- devel/rubygem-google-apis-compute_v1/distinfo | 6 +- .../Makefile | 2 +- .../distinfo | 6 +- devel/rubygem-google-apis-pubsub_v1/Makefile | 2 +- devel/rubygem-google-apis-pubsub_v1/distinfo | 6 +- devel/rubygem-i18n/Makefile | 2 +- devel/rubygem-i18n/distinfo | 6 +- devel/rubygem-inspec-core/Makefile | 3 +- devel/rubygem-inspec-core/distinfo | 6 +- devel/rubygem-ipynbdiff/Makefile | 6 +- devel/rubygem-ipynbdiff/distinfo | 6 +- devel/rubygem-mongo/files/patch-gemspec | 2 +- devel/rubygem-pedump/Makefile | 2 +- 
devel/rubygem-pedump/distinfo | 6 +- devel/rubygem-que-scheduler/Makefile | 23 + devel/rubygem-que-scheduler/distinfo | 3 + devel/rubygem-que-scheduler/pkg-descr | 6 + devel/rubygem-que/Makefile | 7 +- devel/rubygem-que/distinfo | 6 +- devel/rubygem-rails-i18n-rails70/Makefile | 2 +- devel/rubygem-rails-i18n-rails70/distinfo | 6 +- devel/rubygem-ransack/Makefile | 6 +- devel/rubygem-ransack/distinfo | 6 +- devel/rubygem-rspec-rails/Makefile | 2 +- devel/rubygem-rspec-rails/distinfo | 6 +- devel/rubygem-rubocop-performance/Makefile | 2 +- devel/rubygem-rubocop-performance/distinfo | 6 +- devel/rubygem-rubocop-rspec/Makefile | 2 +- devel/rubygem-rubocop-rspec/distinfo | 6 +- devel/rubygem-sentry-rails/Makefile | 22 + devel/rubygem-sentry-rails/distinfo | 3 + devel/rubygem-sentry-rails/pkg-descr | 4 + devel/rubygem-sentry-ruby-core/Makefile | 21 + devel/rubygem-sentry-ruby-core/distinfo | 3 + devel/rubygem-sentry-ruby-core/pkg-descr | 4 + devel/rubygem-sentry-ruby/Makefile | 22 + devel/rubygem-sentry-ruby/distinfo | 3 + devel/rubygem-sentry-ruby/pkg-descr | 4 + devel/rubygem-sentry-sidekiq/Makefile | 22 + devel/rubygem-sentry-sidekiq/distinfo | 3 + devel/rubygem-sentry-sidekiq/pkg-descr | 4 + devel/rubygem-sprockets/Makefile | 2 +- devel/rubygem-sprockets/distinfo | 6 +- devel/rubygem-view_component-rails61/Makefile | 23 + devel/rubygem-view_component-rails61/distinfo | 3 + .../rubygem-view_component-rails61/pkg-descr | 3 + devel/rubygem-xdg/Makefile | 4 +- devel/rubygem-xdg/distinfo | 6 +- devel/rubygem-xdg3/Makefile | 1 + devel/rubygem-xdg3/files/patch-gemspec | 11 + devel/rubygem-xdg4/Makefile | 1 + devel/rubygem-xdg4/files/patch-gemspec | 11 + devel/rust-cbindgen/Makefile | 21 +- devel/rust-cbindgen/distinfo | 38 +- devel/shiboken2/Makefile | 2 +- devel/smooth/Makefile | 1 + devel/sonarqube-community/Makefile | 6 +- devel/sonarqube-community/distinfo | 10 +- devel/sope/Makefile | 1 + devel/sope2/Makefile | 2 +- devel/tclxml/Makefile | 2 +- devel/ticcutils/Makefile 
| 1 + devel/umbrello/Makefile | 1 + devel/universal-ctags/Makefile | 1 + devel/wf-config/Makefile | 1 + devel/xdg-user-dirs/Makefile | 1 + devel/z88dk/Makefile | 2 +- devel/zapcc/Makefile | 2 +- dns/bind9-devel/Makefile | 7 +- dns/bind9-devel/distinfo | 6 +- .../files/extrapatch-bind-min-override-ttl | 18 +- dns/bind9-devel/files/patch-configure.ac | 4 +- dns/bind9-devel/pkg-plist | 3 - dns/bind911/Makefile | 2 +- dns/bind918/Makefile | 3 +- dns/bind918/pkg-plist | 15 +- dns/dnscap/Makefile | 3 +- dns/dnscap/distinfo | 6 +- dns/opendnssec2/Makefile | 1 + dns/py-publicsuffixlist/Makefile | 3 +- dns/py-publicsuffixlist/distinfo | 6 +- editors/abiword/Makefile | 1 + editors/diamond/Makefile | 1 + editors/elementary-code/Makefile | 2 +- editors/emacs-devel/Makefile | 1 + editors/gedit-plugins/Makefile | 1 + editors/gedit/Makefile | 2 +- editors/ghostwriter/Makefile | 11 +- editors/ghostwriter/distinfo | 6 +- .../files/patch-src_MarkdownEditor.cpp | 10 - editors/ghostwriter/pkg-plist | 6 +- editors/gnome-latex/Makefile | 1 + editors/gobby/Makefile | 2 +- editors/gummi/Makefile | 2 +- editors/jucipp/Makefile | 2 +- editors/libreoffice/Makefile | 2 +- editors/libreoffice6/Makefile | 2 +- editors/marker/Makefile | 1 + editors/morla/Makefile | 2 +- editors/mousepad/Makefile | 1 + editors/openoffice-4/Makefile | 2 +- editors/openoffice-devel/Makefile | 2 +- editors/pluma-plugins/Makefile | 2 +- editors/pluma/Makefile | 2 +- editors/quilter/Makefile | 2 +- editors/setzer/Makefile | 2 +- editors/vscode/Makefile | 1 + editors/xed/Makefile | 1 + editors/xmlcopyeditor/Makefile | 1 + emulators/catapult/Makefile | 1 + emulators/fuse/Makefile | 1 + emulators/higan/Makefile | 2 +- emulators/nemu/Makefile | 1 + emulators/qemu-cheri/Makefile | 2 +- emulators/qemu-devel/Makefile | 1 + emulators/qemu-powernv/Makefile | 2 +- emulators/qemu5/Makefile | 1 + emulators/snes9x-gtk/Makefile | 2 +- emulators/tiemu3/Makefile | 2 +- emulators/virtualbox-ose-legacy/Makefile | 2 +- 
.../virtualbox-ose-nox11-legacy/Makefile | 1 + emulators/virtualbox-ose-nox11/Makefile | 1 + emulators/virtualbox-ose/Makefile | 2 +- emulators/wine-proton/Makefile | 2 +- emulators/wine/Makefile | 1 + emulators/yuzu/Makefile | 4 +- emulators/yuzu/distinfo | 6 +- finance/gnucash-docs/Makefile | 2 +- finance/gnucash/Makefile | 2 +- finance/grisbi/Makefile | 1 + finance/libofx/Makefile | 1 + finance/nextcloud-cospend/Makefile | 2 +- finance/nextcloud-cospend/distinfo | 6 +- finance/odoo/Makefile | 1 + finance/py-bitcoin/Makefile | 2 +- finance/py-python-obelisk/files/patch-2to3 | 297 ++ finance/py-yfinance/Makefile | 4 +- finance/py-yfinance/distinfo | 6 +- finance/quickfix/Makefile | 1 + french/verbiste/Makefile | 1 + ftp/R-cran-RCurl/Makefile | 1 + ftp/gstreamer1-plugins-curl/Makefile | 2 +- ftp/py-pycurl/Makefile | 2 +- ftp/py-pycurl/distinfo | 6 +- games/0ad/Makefile | 4 +- games/0ad/files/setuptools.diff | 19 + games/Makefile | 1 + games/armagetronad/Makefile | 2 +- games/el/Makefile | 1 + games/exult/Makefile | 1 + games/freedoko/Makefile | 2 +- games/gtkatlantic/Makefile | 1 + games/gtkevemon/Makefile | 2 +- games/gtkradiant/Makefile | 2 +- games/libmaitretarot/Makefile | 2 +- games/libmt_client/Makefile | 2 +- games/lincity-ng/Makefile | 2 +- games/lordsawar/Makefile | 1 + games/maitretarot/Makefile | 2 +- games/manaplus/Makefile | 2 +- games/mt_dolphin_ia/Makefile | 2 +- games/mt_gtk_client/Makefile | 2 +- games/naev/Makefile | 2 +- games/netradiant/Makefile | 2 +- games/openlierox/Makefile | 2 +- games/py-pychess/Makefile | 2 +- games/tuxmath/Makefile | 2 +- games/tuxtype/Makefile | 2 +- games/xmoto/Makefile | 1 + games/yquake2/Makefile | 45 + games/yquake2/distinfo | 3 + games/yquake2/files/patch-Makefile | 28 + .../yquake2/files/patch-src_client_vid_vid.c | 17 + .../files/patch-src_common_filesystem.c | 12 + games/yquake2/pkg-descr | 9 + games/yquake2/pkg-plist | 16 + graphics/GraphicsMagick/Makefile | 1 + graphics/ImageMagick6/Makefile | 2 +- 
graphics/ImageMagick7/Makefile | 2 +- graphics/airsaned/Makefile | 5 +- graphics/airsaned/distinfo | 6 +- graphics/art/Makefile | 1 + graphics/aseprite/Makefile | 2 +- graphics/atril-lite/Makefile | 1 + graphics/atril/Makefile | 2 +- graphics/autopano-sift-c/Makefile | 2 +- graphics/blender-lts28/Makefile | 1 + graphics/blender-lts29/Makefile | 1 + graphics/blender/Makefile | 1 + graphics/cairomm/Makefile | 2 +- graphics/cegui/Makefile | 2 +- graphics/cluttermm/Makefile | 2 +- graphics/cptutils/Makefile | 1 + graphics/darktable/Makefile | 2 +- graphics/dcmtk/Makefile | 2 +- graphics/delaboratory/Makefile | 2 +- graphics/dia/Makefile | 2 +- graphics/digikam/Makefile | 1 + graphics/elementary-photos/Makefile | 2 +- graphics/eog-plugins/Makefile | 1 + graphics/eom/Makefile | 1 + graphics/evince/Makefile | 2 +- graphics/flam3/Makefile | 1 + graphics/frogr/Makefile | 1 + graphics/fyre/Makefile | 2 +- graphics/gdal/Makefile | 2 +- graphics/gdk-pixbuf2/Makefile | 5 +- .../files/gdk-pixbuf-query-loaders.ucl.in | 17 + graphics/gdk-pixbuf2/pkg-plist | 2 - graphics/gimmage/Makefile | 2 +- graphics/gnash/Makefile | 2 +- graphics/goocanvasmm2/Makefile | 2 +- graphics/gpaint/Makefile | 2 +- graphics/gstreamer1-plugins-aalib/Makefile | 2 +- graphics/gstreamer1-plugins-cairo/Makefile | 2 +- .../gstreamer1-plugins-gdkpixbuf/Makefile | 2 +- graphics/gstreamer1-plugins-gl/Makefile | 2 +- graphics/gstreamer1-plugins-jpeg/Makefile | 2 +- graphics/gstreamer1-plugins-kms/Makefile | 2 +- graphics/gstreamer1-plugins-libcaca/Makefile | 2 +- .../gstreamer1-plugins-libvisual/Makefile | 2 +- graphics/gstreamer1-plugins-opencv/Makefile | 2 +- graphics/gstreamer1-plugins-openexr/Makefile | 2 +- graphics/gstreamer1-plugins-openjpeg/Makefile | 2 +- graphics/gstreamer1-plugins-png/Makefile | 2 +- graphics/gstreamer1-plugins-qt/Makefile | 2 +- graphics/gstreamer1-plugins-rsvg/Makefile | 2 +- graphics/gstreamer1-plugins-vulkan/Makefile | 2 +- graphics/gstreamer1-plugins-webp/Makefile | 2 +- 
graphics/gstreamer1-plugins-zbar/Makefile | 2 +- graphics/iccxml/Makefile | 1 + graphics/ikona/Makefile | 2 +- graphics/impressive/Makefile | 9 +- graphics/impressive/distinfo | 6 +- graphics/inkscape/Makefile | 1 + graphics/kf5-kimageformats/Makefile | 6 +- graphics/kf5-kimageformats/pkg-plist | 4 +- graphics/libavif/pkg-plist | 2 - graphics/libetonyek01/Makefile | 2 +- graphics/libgeotiff/Makefile | 2 +- graphics/libgeotiff/distinfo | 6 +- graphics/libgnomecanvasmm26/Makefile | 2 +- graphics/libgphoto2/Makefile | 1 + graphics/libjxl/Makefile | 1 + graphics/libjxl/pkg-plist | 2 - graphics/libkdcraw/Makefile | 1 + graphics/libopenraw/Makefile | 2 +- graphics/librsvg2-rust/Makefile | 1 + graphics/librsvg2-rust/pkg-plist | 2 - graphics/librsvg2/Makefile | 2 +- graphics/librsvg2/pkg-plist | 2 - graphics/libsvg/Makefile | 2 +- graphics/mesa-devel/Makefile | 4 +- graphics/mesa-devel/distinfo | 6 +- graphics/mesa-dri/pkg-plist | 1 + graphics/minder/Makefile | 1 + graphics/mypaint-brushes/Makefile | 2 + graphics/nip2/Makefile | 2 +- graphics/opencollada/Makefile | 2 +- graphics/opencollada/files/patch-clang13 | 11 + graphics/opendx/Makefile | 3 +- graphics/openfx-arena/Makefile | 2 +- graphics/oyranos/Makefile | 2 +- graphics/p5-Alien-Gimp/Makefile | 2 +- graphics/p5-Alien-Gimp/distinfo | 6 +- graphics/p5-Image-ExifTool/Makefile | 6 +- graphics/p5-Image-ExifTool/distinfo | 6 +- graphics/p5-Image-ExifTool/pkg-descr | 27 +- graphics/p5-Image-ExifTool/pkg-plist | 14 +- graphics/photoflow/Makefile | 2 +- graphics/piddle/files/patch-2to3 | 2238 +++++++++ graphics/poppler-glib/Makefile | 2 +- graphics/poppler-qt5/Makefile | 2 +- graphics/poppler-utils/Makefile | 2 +- graphics/poppler/Makefile | 1 + graphics/proj-data/Makefile | 2 +- graphics/proj-data/distinfo | 6 +- graphics/proj-data/pkg-plist | 5 + graphics/py-beziers/Makefile | 1 + graphics/py-beziers/files/patch-setup.py | 11 + graphics/py-h3/Makefile | 5 + graphics/py-imageio/Makefile | 6 +- graphics/py-imageio/distinfo | 
6 +- graphics/py-pillow/Makefile | 12 +- graphics/py-pillow/distinfo | 6 +- graphics/py-pycha/files/patch-2to3 | 104 + graphics/py-pyproj/Makefile | 4 +- graphics/py-pyproj/distinfo | 6 +- graphics/qgis-ltr/Makefile | 1 + graphics/qgis/Makefile | 1 + graphics/rawstudio/Makefile | 2 +- graphics/rawtherapee/Makefile | 2 +- graphics/rubygem-image_processing/Makefile | 2 +- graphics/rubygem-image_processing/distinfo | 6 +- graphics/rubygem-tanuki_emoji/Makefile | 2 +- graphics/rubygem-tanuki_emoji/distinfo | 6 +- graphics/sane-airscan/Makefile | 1 + graphics/sane-backends/Makefile | 1 + graphics/shotwell/Makefile | 1 + graphics/simple-scan/Makefile | 5 +- graphics/simple-scan/distinfo | 8 +- graphics/swfmill/Makefile | 2 +- graphics/synfig/Makefile | 2 +- graphics/synfigstudio/Makefile | 2 +- graphics/tinyows/Makefile | 1 + graphics/vips/Makefile | 2 +- graphics/waffle/Makefile | 2 +- graphics/wayland/Makefile | 1 + graphics/webp-pixbuf-loader/pkg-plist | 2 - graphics/xournal/Makefile | 2 +- graphics/xournalpp/Makefile | 1 + graphics/yafaray/Makefile | 2 +- irc/hexchat/Makefile | 1 + irc/weechat/Makefile | 2 +- irc/weechat/distinfo | 6 +- japanese/im-ja/Makefile | 2 +- java/openjfx14/Makefile | 2 +- lang/cling/Makefile | 2 +- lang/crystal/Makefile | 21 +- .../extra-patch-src_openssl_lib__crypto.cr | 27 + .../files/extra-patch-src_openssl_lib__ssl.cr | 28 + lang/ferite/Makefile | 2 +- lang/gnustep-base/Makefile | 2 +- lang/mono/Makefile | 1 + .../files/extra-patch-aarch64-race-workaround | 26 + lang/mono5.10/Makefile | 1 + .../files/extra-patch-aarch64-race-workaround | 26 + lang/njs/Makefile | 2 +- lang/perl5-devel/Makefile | 2 +- lang/perl5-devel/distinfo | 6 +- lang/php80/Makefile | 2 +- ..._pcre_pcre2lib_sljit_sljitConfigInternal.h | 11 + lang/php81/Makefile | 2 +- ..._pcre_pcre2lib_sljit_sljitConfigInternal.h | 11 + lang/py-lupa/Makefile | 2 +- lang/py-lupa/distinfo | 6 +- lang/v8-beta/Makefile | 18 +- lang/v8-beta/distinfo | 34 +- lang/v8/Makefile | 16 +- 
lang/v8/distinfo | 34 +- lang/vala/Makefile | 1 + mail/astroid/Makefile | 2 +- mail/balsa/Makefile | 2 +- mail/cone/Makefile | 1 + mail/dovecot-fts-xapian/Makefile | 12 +- mail/dovecot-fts-xapian/distinfo | 6 +- .../files/patch-src_fts-backend-xapian.cpp | 13 + mail/evolution-ews/Makefile | 1 + mail/evolution/Makefile | 1 + mail/geary/Makefile | 2 +- mail/gnubiff/Makefile | 2 +- mail/msmtp/Makefile | 2 +- mail/msmtp/distinfo | 6 +- .../files/patch-2to3 | 165 + mail/py-pyzmail/files/patch-2to3 | 30 +- mail/rspamd-devel/Makefile | 5 +- mail/rspamd-devel/distinfo | 6 +- mail/rspamd-devel/pkg-plist | 1 + mail/rubygem-actionmailbox60/Makefile | 2 +- mail/rubygem-actionmailbox60/distinfo | 6 +- mail/rubygem-actionmailbox61/Makefile | 2 +- mail/rubygem-actionmailbox61/distinfo | 6 +- mail/rubygem-actionmailbox70/Makefile | 2 +- mail/rubygem-actionmailbox70/distinfo | 6 +- mail/rubygem-actionmailer52/Makefile | 2 +- mail/rubygem-actionmailer52/distinfo | 6 +- mail/rubygem-actionmailer60/Makefile | 2 +- mail/rubygem-actionmailer60/distinfo | 6 +- mail/rubygem-actionmailer61/Makefile | 2 +- mail/rubygem-actionmailer61/distinfo | 6 +- mail/rubygem-actionmailer70/Makefile | 2 +- mail/rubygem-actionmailer70/distinfo | 6 +- mail/rubygem-mail/Makefile | 19 +- mail/rubygem-mail/files/patch-mail.gemspec | 18 - mail/s-nail/Makefile | 2 +- mail/s-nail/distinfo | 6 +- math/Makefile | 3 +- math/R-cran-igraph/Makefile | 1 + math/cadabra2/Makefile | 1 + math/coq/Makefile | 2 +- math/drgeo/Makefile | 2 +- math/ggobi/Makefile | 2 +- math/gkmap/Makefile | 2 +- math/gnome-calculator/Makefile | 1 + math/gnumeric/Makefile | 1 + math/gretl/Makefile | 1 + math/igraph/Makefile | 1 + math/lean/Makefile | 2 +- math/lean/distinfo | 6 +- math/libqalculate/Makefile | 1 + math/mate-calc/Makefile | 1 + math/mdal/Makefile | 2 +- math/octave-forge-image/Makefile | 3 +- math/octave-forge-image/distinfo | 6 +- math/octave/Makefile | 2 +- math/openturns/Makefile | 2 +- math/p5-Math-GMPf/Makefile | 2 +- 
math/p5-Math-GMPf/distinfo | 6 +- math/p5-Math-GMPq/Makefile | 2 +- math/p5-Math-GMPq/distinfo | 6 +- math/p5-Math-GMPz/Makefile | 2 +- math/p5-Math-GMPz/distinfo | 6 +- math/pdal/Makefile | 2 +- math/polymake/Makefile | 2 +- math/pspp/Makefile | 2 +- math/py-cma/Makefile | 6 +- math/py-cma/distinfo | 6 +- math/py-fastcluster/Makefile | 2 +- math/py-fastcluster/distinfo | 6 +- math/py-fraction/Makefile | 21 + math/py-fraction/distinfo | 3 + math/py-fraction/pkg-descr | 4 + math/py-gym-notices/Makefile | 2 +- math/py-gym-notices/distinfo | 6 +- math/py-gym/Makefile | 2 +- math/py-gym/distinfo | 6 +- math/py-gym/pkg-descr | 2 +- math/py-iohexperimenter/Makefile | 1 + math/py-iohexperimenter/files/patch-setup.py | 11 + math/py-moarchiving/Makefile | 24 + math/py-moarchiving/distinfo | 3 + math/py-moarchiving/pkg-descr | 7 + math/py-mutatormath/Makefile | 25 - math/py-mutatormath/distinfo | 3 - math/py-mutatormath/pkg-descr | 8 - math/py-pdal/Makefile | 2 + math/py-pdal/files/patch-2to3 | 27 + math/py-pyFFTW/Makefile | 2 +- math/py-pybloom/files/patch-2to3 | 60 + math/py-python-igraph/Makefile | 1 + math/py-svgmath/Makefile | 2 +- math/py-svgmath/files/patch-2to3 | 480 ++ math/py-topologic/Makefile | 2 +- math/qalculate-gtk/Makefile | 1 + math/sage/Makefile | 2 + math/sc-im/Makefile | 2 +- math/scilab/Makefile | 1 + math/stan/Makefile | 2 +- math/stan/distinfo | 6 +- math/stanmath/Makefile | 2 +- math/stanmath/distinfo | 6 +- math/stanmath/pkg-plist | 25 +- math/tablix/Makefile | 2 +- math/timbl/Makefile | 1 + misc/clifm/Makefile | 2 +- misc/clifm/distinfo | 6 +- misc/clifm/files/patch-Makefile | 4 +- misc/copperspice-examples/Makefile | 2 +- misc/freebsd-doc-all/Makefile | 2 +- misc/freebsd-doc-en/Makefile | 2 +- misc/freebsd-doc-en/distinfo | 6 +- misc/gimp-help-en/Makefile | 1 + misc/hotkeys/Makefile | 2 +- misc/iio-oscilloscope/Makefile | 1 + misc/libcomps/Makefile | 1 + misc/libiio/Makefile | 1 + misc/liblxi/Makefile | 1 + misc/librepo/Makefile | 1 + 
misc/libsupertone/Makefile | 2 +- misc/parley/Makefile | 1 + misc/py-QSpectrumAnalyzer/Makefile | 2 +- misc/py-alive-progress/Makefile | 2 +- misc/py-alive-progress/distinfo | 6 +- misc/py-kartograph/files/patch-2to3 | 379 ++ misc/py-onnx-tf/Makefile | 2 +- misc/py-onnx-tf/distinfo | 6 +- misc/py-onnx-tf/files/patch-setup.py | 13 + misc/py-soapy_power/Makefile | 2 +- misc/py-tqdm/Makefile | 2 +- misc/py-tqdm/distinfo | 6 +- misc/py-xgboost/Makefile | 3 + misc/py-xgboost/files/patch-2to3 | 375 ++ misc/rubygem-octicons/Makefile | 2 +- misc/rubygem-octicons/distinfo | 6 +- misc/shared-mime-info/Makefile | 21 +- misc/shared-mime-info/distinfo | 10 +- .../files/patch-data_freedesktop.org.xml.in | 10 + .../files/patch-data_freedesktop__generate.sh | 10 - misc/shared-mime-info/files/patch-meson.build | 11 + misc/tellico/Makefile | 1 + misc/visp/Makefile | 2 +- misc/xfce4-weather-plugin/Makefile | 1 + misc/xiphos/Makefile | 2 +- misc/zoneinfo/Makefile | 2 +- misc/zoneinfo/distinfo | 6 +- multimedia/audacious-plugins/Makefile | 2 +- multimedia/avidemux-cli/Makefile | 2 +- multimedia/avidemux-plugins/Makefile | 2 +- multimedia/avidemux-qt5/Makefile | 2 +- multimedia/avidemux/Makefile | 2 +- multimedia/cineencoder/Makefile | 2 +- multimedia/cineencoder/distinfo | 6 +- multimedia/dvdauthor/Makefile | 1 + multimedia/ffmpeg/Makefile | 2 +- multimedia/ffmpeg/files/patch-dav1d | 39 + multimedia/ffmpeg/files/patch-svtav1 | 151 +- multimedia/gmerlin/Makefile | 2 +- .../gstreamer1-editing-services/Makefile | 1 + multimedia/gstreamer1-plugins-aom/Makefile | 2 +- .../gstreamer1-plugins-assrender/Makefile | 2 +- multimedia/gstreamer1-plugins-bad/Makefile | 2 +- multimedia/gstreamer1-plugins-dash/Makefile | 2 +- multimedia/gstreamer1-plugins-dts/Makefile | 2 +- multimedia/gstreamer1-plugins-dv/Makefile | 2 +- .../gstreamer1-plugins-dvdread/Makefile | 2 +- multimedia/gstreamer1-plugins-good/Makefile | 2 +- multimedia/gstreamer1-plugins-hls/Makefile | 2 +- 
multimedia/gstreamer1-plugins-kate/Makefile | 2 +- .../gstreamer1-plugins-libde265/Makefile | 2 +- .../gstreamer1-plugins-mpeg2dec/Makefile | 2 +- .../gstreamer1-plugins-mpeg2enc/Makefile | 2 +- multimedia/gstreamer1-plugins-mplex/Makefile | 2 +- multimedia/gstreamer1-plugins-msdk/Makefile | 2 +- .../gstreamer1-plugins-openh264/Makefile | 2 +- .../gstreamer1-plugins-resindvd/Makefile | 2 +- multimedia/gstreamer1-plugins-rtmp/Makefile | 2 +- .../Makefile | 2 +- multimedia/gstreamer1-plugins-theora/Makefile | 2 +- multimedia/gstreamer1-plugins-ttml/Makefile | 2 +- multimedia/gstreamer1-plugins-ugly/Makefile | 2 +- multimedia/gstreamer1-plugins-v4l2/Makefile | 2 +- multimedia/gstreamer1-plugins-vpx/Makefile | 2 +- multimedia/gstreamer1-plugins-x264/Makefile | 2 +- multimedia/gstreamer1-plugins-x265/Makefile | 2 +- multimedia/gstreamer1-plugins/Makefile | 2 +- multimedia/gstreamermm/Makefile | 1 + multimedia/gtk-youtube-viewer/Makefile | 2 +- multimedia/gtk-youtube-viewer/distinfo | 6 +- multimedia/handbrake/Makefile | 1 + multimedia/imagination/Makefile | 2 +- multimedia/kodi/Makefile | 2 +- multimedia/libbluray/Makefile | 1 + multimedia/libmediaart/Makefile | 1 + multimedia/libmediainfo/Makefile | 14 +- .../libmediainfo/files/patch-Makefile.am | 22 - .../libmediainfo/files/patch-Makefile.in | 42 + multimedia/mlt6/Makefile | 2 +- multimedia/mlt7/Makefile | 1 + multimedia/mythtv-frontend/Makefile | 2 +- multimedia/mythtv/Makefile | 2 +- multimedia/ogmrip/Makefile | 2 +- multimedia/phonon-gstreamer/Makefile | 2 +- multimedia/svt-av1/Makefile | 2 +- multimedia/totem-pl-parser/Makefile | 1 + multimedia/totem/Makefile | 1 + multimedia/vcdimager/Makefile | 2 +- multimedia/vlc/Makefile | 1 + net-im/cawbird/Makefile | 3 +- net-im/cawbird/distinfo | 6 +- net-im/dendrite/Makefile | 26 +- net-im/dendrite/distinfo | 54 +- net-im/folks/Makefile | 2 +- net-im/fractal/Makefile | 2 +- net-im/kopete/Makefile | 1 + net-im/libaccounts-glib/Makefile | 1 + net-im/libpurple/Makefile | 2 +- 
net-im/pidgin-sipe/Makefile | 1 + net-im/pidgin-twitter/Makefile | 2 +- net-im/py-matrix-synapse/Makefile | 9 +- net-im/py-matrix-synapse/distinfo | 6 +- .../patch-synapse_python__dependencies.py | 10 +- net-im/py-matrix-synapse/files/pkg-message.in | 11 + net-im/py-matrix-synapse/files/synapse.in | 12 +- net-im/py-punjab/files/patch-2to3 | 263 + net-im/telepathy-farstream/Makefile | 2 +- net-im/telepathy-gabble/Makefile | 1 + net-im/telepathy-glib/Makefile | 2 +- net-im/telepathy-idle/Makefile | 2 +- net-im/telepathy-logger-qt5/Makefile | 1 + net-im/telepathy-logger/Makefile | 2 +- net-im/telepathy-mission-control/Makefile | 1 + net-im/telepathy-qt/Makefile | 2 +- net-im/telepathy-salut/Makefile | 2 +- net-mgmt/arpwatch/Makefile | 2 +- net-mgmt/arpwatch/distinfo | 6 +- net-mgmt/check_ssl_cert/Makefile | 2 +- net-mgmt/check_ssl_cert/distinfo | 6 +- net-mgmt/ndpmon/Makefile | 2 +- net-mgmt/netbox/Makefile | 10 +- net-mgmt/netbox/distinfo | 6 +- net-mgmt/netbox/pkg-plist | 15 +- net-mgmt/torrus/Makefile | 1 + net-mgmt/virt-viewer/Makefile | 1 + net-p2p/dclib/Makefile | 2 +- net-p2p/gtk-gnutella/Makefile | 1 + net-p2p/linuxdcpp/Makefile | 2 +- net-p2p/microdc2/Makefile | 2 +- net-p2p/minder/Makefile | 2 +- net-p2p/py-vertex/Makefile | 5 +- net-p2p/py-vertex/files/patch-2to3 | 519 ++ net-p2p/qbittorrent/Makefile | 2 +- net-p2p/qbittorrent/distinfo | 6 +- net-p2p/retroshare/Makefile | 2 +- net/Sockets/Makefile | 2 +- net/asterisk18/Makefile | 1 + net/ceph14/Makefile | 2 +- net/fort/Makefile | 1 + net/gfbgraph/Makefile | 1 + net/gitlab-agent/Makefile | 2 +- net/gitlab-agent/distinfo | 10 +- net/glusterfs/Makefile | 1 + net/gmid/Makefile | 2 +- net/gmid/distinfo | 6 +- net/gnome-online-accounts/Makefile | 1 + net/gnome-online-miners/Makefile | 1 + net/gq/Makefile | 2 +- net/grilo-plugins/Makefile | 1 + net/grilo/Makefile | 1 + net/gstreamer1-plugins-libmms/Makefile | 2 +- net/gstreamer1-plugins-srtp/Makefile | 2 +- net/gupnp-av/Makefile | 1 + net/gupnp-dlna/Makefile | 2 
+- net/gupnp-tools/Makefile | 4 +- net/gupnp/Makefile | 1 + net/kamailio/Makefile | 1 + net/kitinerary/Makefile | 1 + net/libcmis/Makefile | 2 +- net/libgnetwork/Makefile | 2 +- net/libgrss/Makefile | 1 + net/libgweather/Makefile | 2 +- net/libgweather4/Makefile | 1 + net/liblinphone/Makefile | 2 +- net/libmateweather/Makefile | 1 + net/liferea/Makefile | 1 + net/mobile-broadband-provider-info/Makefile | 1 + net/ns3/Makefile | 1 + net/opensips31/Makefile | 2 +- net/p5-Net-OpenSSH/Makefile | 2 +- net/p5-Net-OpenSSH/distinfo | 6 +- net/pacemaker1/Makefile | 2 +- net/pacemaker2/Makefile | 1 + net/pecl-xmlrpc/Makefile | 1 + net/php74-xmlrpc/Makefile | 1 + net/php80-soap/Makefile | 1 + net/php81-soap/Makefile | 1 + net/py-amqplib/files/patch-2to3 | 69 + net/py-errbot/Makefile | 13 + net/py-gdown/Makefile | 2 +- net/py-gdown/distinfo | 6 +- net/py-matrix-synapse-ldap3/Makefile | 8 +- net/py-matrix-synapse-ldap3/distinfo | 6 +- net/py-matrix-synapse-ldap3/files/setup.py | 3 + net/py-pyroute2.core/Makefile | 2 +- net/py-pyroute2.core/distinfo | 6 +- net/py-pyroute2.ethtool/Makefile | 2 +- net/py-pyroute2.ethtool/distinfo | 6 +- net/py-pyroute2.ipset/Makefile | 2 +- net/py-pyroute2.ipset/distinfo | 6 +- net/py-pyroute2.minimal/Makefile | 2 +- net/py-pyroute2.minimal/distinfo | 6 +- net/py-pyroute2.ndb/Makefile | 2 +- net/py-pyroute2.ndb/distinfo | 6 +- net/py-pyroute2.nftables/Makefile | 2 +- net/py-pyroute2.nftables/distinfo | 6 +- net/py-pyroute2.nslink/Makefile | 2 +- net/py-pyroute2.nslink/distinfo | 6 +- net/py-pyroute2.protocols/Makefile | 2 +- net/py-pyroute2.protocols/distinfo | 6 +- net/py-pyroute2/Makefile | 2 +- net/py-pyroute2/distinfo | 6 +- net/py-pyroute2/files/patch-setup.cfg | 10 +- net/py-python-barbicanclient/Makefile | 2 +- net/py-python-barbicanclient/distinfo | 6 +- net/py-python-cinderclient/Makefile | 2 +- net/py-python-cinderclient/distinfo | 6 +- net/py-python-heatclient/Makefile | 2 +- net/py-python-heatclient/distinfo | 6 +- 
net/py-python-novaclient/Makefile | 2 +- net/py-python-novaclient/distinfo | 6 +- net/py-python-openstackclient/Makefile | 4 +- net/py-python-openstackclient/distinfo | 6 +- net/py-python-socks/Makefile | 2 +- net/py-qt5-networkauth/Makefile | 1 + net/py-softlayer/Makefile | 4 +- net/py-softlayer/distinfo | 6 +- net/py-stomp.py/files/patch-2to3 | 13 + net/py-suds-jurko/Makefile | 2 +- net/py-suds-jurko/files/patch-setup.py | 11 +- net/py-tofu/files/patch-2to3 | 300 ++ net/rubygem-activestorage52/Makefile | 2 +- net/rubygem-activestorage52/distinfo | 6 +- net/rubygem-activestorage60/Makefile | 2 +- net/rubygem-activestorage60/distinfo | 6 +- net/rubygem-activestorage61/Makefile | 2 +- net/rubygem-activestorage61/distinfo | 6 +- net/rubygem-activestorage70/Makefile | 2 +- net/rubygem-activestorage70/distinfo | 6 +- net/rubygem-fog-google/Makefile | 3 +- net/rubygem-gitaly/Makefile | 4 +- net/rubygem-gitaly/distinfo | 6 +- net/rubygem-google-cloud-logging-v2/Makefile | 2 +- net/rubygem-google-cloud-logging-v2/distinfo | 6 +- net/rubygem-opennebula/Makefile | 2 +- net/rubygem-opennebula/distinfo | 6 +- net/rubygem-ovirt-engine-sdk/Makefile | 1 + net/rubygem-train-core/Makefile | 2 +- net/rubygem-train-core/distinfo | 6 +- net/rubygem-train/Makefile | 2 +- net/rubygem-train/distinfo | 6 +- net/traefik/Makefile | 2 +- net/traefik/distinfo | 6 +- net/urlendec/Makefile | 2 +- net/vinagre/Makefile | 2 +- net/wireshark/Makefile | 1 + net/yaz/Makefile | 1 + news/nzbget/Makefile | 1 + print/Makefile | 1 + print/R-cran-knitr/Makefile | 6 +- print/R-cran-knitr/distinfo | 6 +- print/adobe-cmaps/Makefile | 1 + print/fontforge/Makefile | 2 +- print/foomatic-db-engine/Makefile | 1 + print/miktex/Makefile | 1 + print/pdfchain/Makefile | 1 + print/py-fontparts/Makefile | 2 +- print/py-fontparts/distinfo | 6 +- print/py-pdf/Makefile | 2 +- print/py-pdf/files/patch-2to3 | 638 +++ print/py-pypdf3/Makefile | 23 + print/py-pypdf3/distinfo | 3 + print/py-pypdf3/pkg-descr | 8 + 
print/py-ttfautohint-py/Makefile | 2 +- print/py-ttfautohint-py/distinfo | 6 +- print/py-uharfbuzz/Makefile | 2 +- print/py-uharfbuzz/distinfo | 6 +- print/py-uharfbuzz/files/patch-harfbuzz4 | 440 -- print/scribus-devel/Makefile | 1 + print/simple-fb2-reader/Makefile | 1 + print/xreader/Makefile | 2 +- science/abinit/Makefile | 2 +- science/afni/Makefile | 1 + science/atompaw/Makefile | 2 +- science/atompaw/distinfo | 6 +- science/chemical-mime-data/Makefile | 2 +- science/code_saturne/Makefile | 2 +- science/fleur/Makefile | 1 + science/gchemutils/Makefile | 2 +- science/ghemical/Makefile | 2 +- science/ghmm/Makefile | 1 + science/gromacs/Makefile | 1 + science/lammps/Makefile | 18 +- science/lammps/distinfo | 6 +- science/lammps/pkg-plist | 2 + science/openbabel/Makefile | 2 +- science/orthanc/Makefile | 2 +- science/py-asdf-standard/Makefile | 2 +- science/py-asdf/Makefile | 10 +- science/py-asdf/distinfo | 6 +- science/py-cirq-google/Makefile | 2 +- science/py-cirq-google/distinfo | 6 +- science/py-eccodes/Makefile | 2 +- science/py-eccodes/distinfo | 6 +- science/py-kliff/Makefile | 2 +- science/py-kliff/distinfo | 6 +- science/py-libpysal/Makefile | 2 +- science/py-libpysal/distinfo | 6 +- .../py-mdp/files/patch-mdp__configuration.py | 4 +- science/py-oddt/Makefile | 6 +- science/py-oddt/files/patch-setup.py | 13 + science/py-paida/Makefile | 2 +- science/py-paida/files/patch-2to3 | 2038 ++++++++ science/py-pygeometa/Makefile | 2 +- science/py-pygeometa/distinfo | 6 +- science/py-pymol/Makefile | 2 +- science/py-pyteomics/Makefile | 2 +- science/py-pyteomics/distinfo | 6 +- science/py-qspin/files/patch-2to3 | 113 + science/py-segyio/Makefile | 2 + science/py-segyio/files/patch-2to3 | 302 ++ science/qbox/Makefile | 2 +- science/qbox/distinfo | 6 +- science/qmcpack/Makefile | 2 +- science/qwalk/Makefile | 2 +- security/Makefile | 1 + security/beid/Makefile | 1 + security/belier/files/patch-2to3 | 141 + security/boringssl/Makefile | 4 +- 
security/boringssl/distinfo | 6 +- security/bzrtp/Makefile | 1 + security/clamav-lts/Makefile | 1 + security/gnome-keyring/Makefile | 1 + security/gnomint/Makefile | 2 +- security/gonepass/Makefile | 2 +- .../greenbone-security-assistant/Makefile | 2 +- security/gstreamer1-plugins-dtls/Makefile | 2 +- security/gvm-libs/Makefile | 1 + security/gvmd/Makefile | 1 + security/hashcat/Makefile | 5 +- .../files/patch-deps_LZMA-SDK_C_CpuArch.c | 34 + security/kc24/Makefile | 2 +- security/lasso/Makefile | 1 + security/lastpass-cli/Makefile | 1 + security/libsecret/Makefile | 1 + security/modsecurity3/Makefile | 1 + security/oath-toolkit/Makefile | 2 +- security/pgpdump/Makefile | 2 +- security/pgpdump/distinfo | 6 +- security/pwman/Makefile | 2 +- security/py-RestrictedPython/files/patch-2to3 | 469 ++ security/py-cerealizer/files/patch-2to3 | 98 + security/py-ecdsa/Makefile | 19 +- security/py-ecdsa/distinfo | 6 +- security/py-ecdsa/pkg-descr | 38 +- security/py-gnutls/files/patch-2to3 | 201 + security/py-pyaxo/files/patch-2to3 | 173 + security/py-pysaml2/Makefile | 2 +- security/py-pysaml2/distinfo | 6 +- security/py-pysodium/Makefile | 1 + security/py-pysodium/files/patch-setup.py | 11 + security/py-python-openid/files/patch-2to3 | 997 ++++ security/py-tlslite-ng/Makefile | 23 + security/py-tlslite-ng/distinfo | 3 + security/py-tlslite-ng/pkg-descr | 7 + security/py-tlslite/files/patch-async | 89 + security/py-xmlsec/Makefile | 1 + security/py-zope.password/files/patch-2to3 | 205 + security/razorback-scriptNugget/Makefile | 2 +- security/rubygem-googleauth/Makefile | 4 +- security/rubygem-googleauth/distinfo | 6 +- security/rubygem-securerandom/Makefile | 2 +- security/rubygem-securerandom/distinfo | 6 +- security/spectre-meltdown-checker/Makefile | 2 +- security/spectre-meltdown-checker/distinfo | 6 +- security/stoken/Makefile | 1 + security/xmlsec1/Makefile | 1 + security/xray-core/Makefile | 22 +- security/xray-core/distinfo | 44 +- sysutils/accountsservice/Makefile | 
2 +- sysutils/brasero/Makefile | 1 + sysutils/bulk_extractor/Makefile | 1 + sysutils/cbsd/Makefile | 2 +- sysutils/cbsd/distinfo | 6 +- sysutils/cbsd/pkg-plist | 2 +- sysutils/cfengine-devel/Makefile | 1 + sysutils/cfengine316/Makefile | 1 + sysutils/cfengine317/Makefile | 1 + sysutils/cfengine318/Makefile | 1 + sysutils/cfengine319/Makefile | 1 + sysutils/cinnamon-control-center/Makefile | 1 + sysutils/cinnamon-settings-daemon/Makefile | 1 + sysutils/cluster-glue/Makefile | 2 +- sysutils/consolekit2/Makefile | 2 +- sysutils/fluent-bit/Makefile | 2 +- sysutils/fluent-bit/distinfo | 6 +- sysutils/fusefs-s3fs/Makefile | 1 + sysutils/gapcmon/Makefile | 2 +- sysutils/gconf-editor/Makefile | 2 +- sysutils/gksu/Makefile | 2 +- sysutils/gnome-control-center/Makefile | 3 +- ...ch-panels_user-accounts_cc-realm-manager.c | 25 + sysutils/gnome-control-center/pkg-descr | 2 +- sysutils/gnome-system-monitor/Makefile | 1 + sysutils/gomi/Makefile | 3 +- sysutils/graveman/Makefile | 2 +- sysutils/gsmartcontrol/Makefile | 2 +- sysutils/gstreamer1-plugins-cdio/Makefile | 2 +- sysutils/istatserver/Makefile | 1 + sysutils/jstest-gtk/Makefile | 2 +- sysutils/khelpcenter/Makefile | 1 + sysutils/libgksu/Makefile | 2 +- sysutils/ltfs/Makefile | 2 +- sysutils/lttng-tools/Makefile | 2 +- sysutils/mate-control-center/Makefile | 1 + sysutils/mate-system-monitor/Makefile | 1 + sysutils/nitrogen/Makefile | 2 +- sysutils/osinfo-db-tools/Makefile | 1 + sysutils/pam_mount/Makefile | 2 +- sysutils/pam_xdg/Makefile | 2 +- sysutils/pam_xdg/distinfo | 4 +- sysutils/py-ansible-lint/Makefile | 11 +- sysutils/py-ansible-lint/distinfo | 6 +- sysutils/py-diffoscope/Makefile | 2 +- sysutils/py-diffoscope/distinfo | 6 +- sysutils/py-drmaa/files/patch-2to3 | 28 + sysutils/py-hared/Makefile | 2 +- sysutils/py-hared/files/patch-2to3 | 30 + sysutils/py-hared/files/patch-setup.py | 11 + sysutils/py-mitogen/files/patch-2to3 | 169 + sysutils/py-mqttwarn/Makefile | 5 +- sysutils/py-mqttwarn/distinfo | 6 +- 
sysutils/py-power/files/patch-2to3 | 24 + sysutils/quicksynergy/Makefile | 2 +- sysutils/restic/Makefile | 12 +- sysutils/restic/distinfo | 26 +- .../rubygem-bolt/files/patch-bolt.gemspec | 2 +- sysutils/rubygem-bundler/Makefile | 2 +- sysutils/rubygem-bundler/distinfo | 6 +- sysutils/rubygem-sys-filesystem/Makefile | 1 + sysutils/shlock/Makefile | 9 +- sysutils/shlock/distinfo | 6 +- sysutils/squashfs-tools/Makefile | 32 +- sysutils/squashfs-tools/distinfo | 6 +- sysutils/squashfs-tools/files/patch-Makefile | 22 +- sysutils/tracker-miners/Makefile | 2 +- sysutils/tracker/Makefile | 2 +- sysutils/tracker3/Makefile | 8 +- sysutils/upower/Makefile | 27 +- sysutils/upower/distinfo | 6 +- sysutils/upower/files/patch-meson.build | 11 + sysutils/upower/pkg-plist | 3 +- sysutils/wimlib/Makefile | 1 + sysutils/xen-tools/Makefile | 2 +- sysutils/xvidcap/Makefile | 2 +- textproc/Makefile | 1 + textproc/R-cran-XML/Makefile | 1 + textproc/R-cran-sass/Makefile | 2 +- textproc/R-cran-sass/distinfo | 6 +- textproc/R-cran-xml2/Makefile | 1 + textproc/apertium/Makefile | 1 + textproc/augeas/Makefile | 2 +- textproc/diffmark/Makefile | 2 +- textproc/docbook2X/Makefile | 2 +- textproc/ebook-tools/Makefile | 2 +- textproc/fpc-libxml2/Makefile | 1 + textproc/gdome2/Makefile | 2 +- textproc/gmetadom/Makefile | 2 +- textproc/gspell/Makefile | 2 +- textproc/lasem/Makefile | 1 + textproc/libabw/Makefile | 1 + textproc/libcroco/Makefile | 1 + textproc/libe-book/Makefile | 2 +- textproc/libextractor/Makefile | 1 + textproc/libfo/Makefile | 2 +- textproc/libfolia/Makefile | 1 + textproc/libgepub/Makefile | 2 +- textproc/liblingoteach/Makefile | 2 +- textproc/libodfgen01/Makefile | 1 + textproc/libtranslate/Makefile | 2 +- textproc/libvisio01/Makefile | 2 +- textproc/libwpd010/Makefile | 2 +- textproc/libxml++26/Makefile | 2 +- textproc/lttoolbox/Makefile | 1 + textproc/p5-JSON-Validator/Makefile | 2 +- textproc/p5-JSON-Validator/distinfo | 6 +- textproc/p5-Lingua-Preferred/Makefile | 2 + 
textproc/p5-XML-CanonicalizeXML/Makefile | 1 + textproc/p5-XML-LibXSLT/Makefile | 1 + textproc/p5-XML-Liberal/Makefile | 3 +- textproc/p5-XML-Liberal/distinfo | 5 +- textproc/php74-xmlreader/Makefile | 1 + textproc/php74-xsl/Makefile | 1 + textproc/php80-dom/Makefile | 1 + textproc/php80-simplexml/Makefile | 1 + textproc/php80-xml/Makefile | 1 + textproc/php80-xmlreader/Makefile | 1 + textproc/php80-xmlwriter/Makefile | 1 + textproc/php80-xsl/Makefile | 1 + textproc/php81-dom/Makefile | 1 + textproc/php81-simplexml/Makefile | 1 + textproc/php81-xml/Makefile | 1 + textproc/php81-xmlreader/Makefile | 1 + textproc/php81-xmlwriter/Makefile | 1 + textproc/php81-xsl/Makefile | 1 + textproc/py-cmarkgfm/Makefile | 2 +- textproc/py-cmarkgfm/distinfo | 6 +- textproc/py-cssselect2/Makefile | 7 +- textproc/py-cssselect2/distinfo | 6 +- textproc/py-cssselect2/files/setup.py | 34 + textproc/py-elasticsearch/Makefile | 2 +- textproc/py-elasticsearch/distinfo | 6 +- textproc/py-ini2toml/Makefile | 22 + textproc/py-ini2toml/distinfo | 3 + textproc/py-ini2toml/pkg-descr | 10 + textproc/py-isbnlib/Makefile | 2 +- textproc/py-isbnlib/distinfo | 6 +- textproc/py-markdown/Makefile | 24 +- textproc/py-markdown/distinfo | 6 +- textproc/py-markdown/files/patch-setup.py | 10 - textproc/py-markdown/pkg-descr | 27 +- textproc/py-mkdocs-material/Makefile | 4 +- textproc/py-mkdocs-material/distinfo | 6 +- textproc/py-pybtex/Makefile | 1 + textproc/py-pybtex/files/patch-setup.py | 11 + textproc/py-python-lsp-server/Makefile | 2 +- textproc/py-python-lsp-server/distinfo | 6 +- textproc/py-reverend/files/patch-2to3 | 93 + textproc/py-sphinx-tabs/Makefile | 4 +- textproc/py-sphinx-tabs/distinfo | 6 +- textproc/py-sphinx-tabs/files/patch-setup.py | 11 - textproc/py-stemming/files/patch-2to3 | 11 + textproc/py-tinycss/Makefile | 2 + .../patch-wordcloud_query__integral__image.c | 82 + textproc/py-wordnet/Makefile | 4 +- textproc/py-wordnet/files/concordance.py | 128 + 
textproc/py-wordnet/files/patch-2to3 | 1594 +++++++ textproc/py-xhtml2pdf/Makefile | 12 +- textproc/py-xhtml2pdf/distinfo | 6 +- .../py-zope.structuredtext/files/patch-2to3 | 138 + textproc/py-zope.tal/files/patch-2to3 | 1119 +++++ textproc/py-zpt/files/patch-2to3 | 1071 +++++ textproc/raptor/Makefile | 2 +- textproc/raptor2/Makefile | 2 +- textproc/rubygem-actiontext60/Makefile | 2 +- textproc/rubygem-actiontext60/distinfo | 6 +- textproc/rubygem-actiontext61/Makefile | 2 +- textproc/rubygem-actiontext61/distinfo | 6 +- textproc/rubygem-actiontext70/Makefile | 2 +- textproc/rubygem-actiontext70/distinfo | 6 +- textproc/rubygem-chewy/Makefile | 2 +- textproc/rubygem-chewy/distinfo | 6 +- textproc/rubygem-commonmarker/Makefile | 2 +- textproc/rubygem-commonmarker/distinfo | 6 +- textproc/rubygem-elasticsearch-api/Makefile | 2 +- textproc/rubygem-elasticsearch-api/distinfo | 6 +- .../rubygem-elasticsearch-transport/Makefile | 2 +- .../rubygem-elasticsearch-transport/distinfo | 6 +- textproc/rubygem-elasticsearch-xpack/Makefile | 2 +- textproc/rubygem-elasticsearch-xpack/distinfo | 6 +- textproc/rubygem-elasticsearch/Makefile | 2 +- textproc/rubygem-elasticsearch/distinfo | 6 +- .../rubygem-jekyll-sass-converter/Makefile | 2 +- .../rubygem-jekyll-sass-converter/distinfo | 6 +- textproc/rubygem-libxml-ruby/Makefile | 1 + textproc/rubygem-liquid/Makefile | 2 +- textproc/rubygem-liquid/distinfo | 6 +- textproc/rubygem-nokogiri/Makefile | 1 + textproc/rubygem-nokogiri111/Makefile | 1 + textproc/rubygem-nokogumbo/Makefile | 1 + textproc/rubygem-ruby-augeas/Makefile | 2 +- textproc/rubygem-ruby-xslt/Makefile | 1 + textproc/ssddiff/Makefile | 2 +- textproc/translate-toolkit/Makefile | 6 +- textproc/translate-toolkit/distinfo | 6 +- .../files/patch-requirements-optional.txt | 12 +- textproc/ucto/Makefile | 1 + textproc/wv/Makefile | 2 +- textproc/wv2/Makefile | 2 +- textproc/xmlroff/Makefile | 1 + textproc/xmlstarlet/Makefile | 1 + textproc/xmlwrapp/Makefile | 2 +- 
textproc/zorba/Makefile | 2 +- www/Makefile | 2 + www/aria2/Makefile | 1 + www/bluefish/Makefile | 1 + www/castget/Makefile | 1 + www/chromium/Makefile | 2 +- www/chromium/distinfo | 10 +- www/codeigniter/Makefile | 18 +- www/codeigniter/distinfo | 6 +- .../files/codeigniter-development-cgi.conf.in | 6 - .../files/codeigniter-development.conf.in | 6 - .../files/codeigniter-production-cgi.conf.in | 6 - .../files/codeigniter-production.conf.in | 6 - www/codeigniter/files/codeigniter.conf.in | 6 - www/codeigniter/pkg-plist | 217 +- www/cssed/Makefile | 2 +- www/davix/Makefile | 1 + www/deno/Makefile | 117 +- www/deno/distinfo | 236 +- www/epiphany/Makefile | 1 + www/flickcurl/Makefile | 2 +- www/gitlab-ce/Makefile | 24 +- www/gitlab-ce/distinfo | 6 +- www/gitlab-ce/files/patch-Gemfile | 22 +- .../files/patch-config_gitlab.yml.example | 14 +- www/gitlab-ce/pkg-message | 4 +- www/gitlab-pages/Makefile | 2 +- www/gitlab-pages/distinfo | 10 +- www/gitlab-workhorse/Makefile | 4 +- www/gitlab-workhorse/distinfo | 6 +- www/gnome-user-share/Makefile | 2 +- www/gstreamer1-plugins-neon/Makefile | 2 +- www/hiawatha/Makefile | 2 +- www/kannel-sqlbox/Makefile | 2 +- www/kannel/Makefile | 2 +- www/midori/Makefile | 2 +- www/mod_authnz_crowd/Makefile | 2 +- www/mod_php74/Makefile | 1 + www/mod_php80/Makefile | 1 + www/mod_php81/Makefile | 1 + www/mod_proxy_xml/Makefile | 2 +- www/mod_xmlns/Makefile | 2 +- www/newsboat/Makefile | 2 +- www/nghttp2/Makefile | 1 + www/nginx-full/Makefile | 2 +- www/onlyoffice-documentserver/Makefile | 2 +- www/p5-CGI-Compile/Makefile | 2 +- www/p5-CGI-Compile/distinfo | 5 +- www/p5-Dancer-Session-Cookie/Makefile | 2 +- www/p5-Dancer-Session-Cookie/distinfo | 6 +- www/p5-Dancer2/Makefile | 2 +- www/p5-Dancer2/distinfo | 6 +- www/p5-Dancer2/pkg-plist | 2 + www/p5-HTTP-Exception/Makefile | 3 +- www/p5-HTTP-Exception/distinfo | 5 +- www/p5-HTTP-Session2/Makefile | 5 +- www/p5-HTTP-Session2/distinfo | 5 +- www/p5-HTTP-Session2/pkg-plist | 2 + 
www/p5-Session-Storage-Secure/Makefile | 5 +- www/p5-Session-Storage-Secure/distinfo | 5 +- www/pecl-solr/Makefile | 1 + www/py-WebError/files/patch-2to3 | 319 ++ www/py-aioh2/Makefile | 2 +- www/py-aioh2/files/patch-aioh2-helper.py | 8 + www/py-aioh2/files/patch-aioh2-protocol.py | 11 + www/py-bleach/Makefile | 26 +- www/py-bleach/distinfo | 6 +- www/py-bleach/pkg-descr | 18 +- www/py-dj32-django-modelcluster/Makefile | 2 +- www/py-dj32-django-modelcluster/distinfo | 6 +- www/py-django-allauth/Makefile | 10 +- www/py-django-allauth/distinfo | 6 +- www/py-django-allauth/pkg-descr | 5 +- www/py-django-bootstrap-form/Makefile | 3 + www/py-django-hijack/Makefile | 44 +- www/py-django-hijack/distinfo | 8 +- www/py-django-hijack/files/patch-setup.py | 35 + www/py-django-modelcluster/Makefile | 2 +- www/py-django-modelcluster/distinfo | 6 +- www/py-django-sudo/Makefile | 3 + www/py-django/Makefile | 3 + www/py-dtflickr/files/patch-2to3 | 103 + www/py-flask-caching/Makefile | 21 + www/py-flask-caching/distinfo | 3 + www/py-flask-caching/pkg-descr | 5 + www/py-google-api-core/Makefile | 2 +- www/py-google-api-core/distinfo | 6 +- .../Makefile | 4 +- .../distinfo | 6 +- www/py-google-cloud-bigtable/Makefile | 6 +- www/py-google-cloud-bigtable/distinfo | 6 +- www/py-google-cloud-core/Makefile | 6 +- www/py-google-cloud-core/distinfo | 6 +- www/py-google-cloud-datastore/Makefile | 12 +- www/py-google-cloud-datastore/distinfo | 6 +- www/py-google-cloud-dlp/Makefile | 4 +- www/py-google-cloud-dlp/distinfo | 6 +- www/py-google-cloud-speech/Makefile | 12 +- www/py-google-cloud-speech/distinfo | 6 +- www/py-google-cloud-vision/Makefile | 6 +- www/py-google-cloud-vision/distinfo | 6 +- www/py-google-resumable-media/Makefile | 2 +- www/py-google-resumable-media/distinfo | 6 +- www/py-habanero/Makefile | 4 + www/py-habanero/distinfo | 4 +- www/py-html5-parser/Makefile | 1 + www/py-instabot/Makefile | 2 +- www/py-instabot/files/patch-setup.py | 9 + www/py-jonpy/files/patch-2to3 | 398 
++ www/py-nevow/files/patch-2to3 | 4233 +++++++++++++++++ www/py-notebook/Makefile | 2 +- www/py-notebook/distinfo | 6 +- www/py-puppetboard/Makefile | 2 +- www/py-puppetboard/distinfo | 6 +- www/py-py-restclient/files/patch-2to3 | 312 ++ www/py-pysmartdl/Makefile | 1 + www/py-pysmartdl/files/patch-setup.py | 11 + www/py-quilt3/Makefile | 2 +- www/py-quilt3/files/patch-setup.py | 26 +- www/py-requests-oauthlib/Makefile | 12 +- www/py-requests-oauthlib/distinfo | 6 +- www/py-requests-oauthlib/pkg-descr | 2 +- www/py-restclient/files/patch-2to3 | 377 ++ www/py-sentinelhub/Makefile | 2 +- .../files/patch-requirements.txt | 11 - www/py-starlette/Makefile | 6 +- www/py-starlette/distinfo | 6 +- .../files/patch-tornado-test-asyncio_test.py | 12 + www/py-uvicorn/Makefile | 4 +- www/py-uvicorn/distinfo | 6 +- www/py-uvicorn/files/patch-setup.py | 11 - www/py-waitress/Makefile | 4 +- www/py-waitress/distinfo | 6 +- www/py-webunit/files/patch-2to3 | 548 +++ www/py-wikitools/files/patch-2to3 | 235 + www/qt5-webengine/Makefile | 2 +- www/qt5-webkit/Makefile | 2 +- www/redmine4/Makefile | 3 +- www/redmine4/files/patch-Gemfile | 2 +- www/redmine42/Makefile | 3 +- www/redmine42/files/patch-Gemfile | 2 +- www/rssroll/Makefile | 1 + www/rsstool/Makefile | 2 +- www/rubygem-actioncable52/Makefile | 2 +- www/rubygem-actioncable52/distinfo | 6 +- www/rubygem-actioncable60/Makefile | 2 +- www/rubygem-actioncable60/distinfo | 6 +- www/rubygem-actioncable61/Makefile | 2 +- www/rubygem-actioncable61/distinfo | 6 +- www/rubygem-actioncable70/Makefile | 2 +- www/rubygem-actioncable70/distinfo | 6 +- www/rubygem-actionpack52/Makefile | 2 +- www/rubygem-actionpack52/distinfo | 6 +- www/rubygem-actionpack60/Makefile | 2 +- www/rubygem-actionpack60/distinfo | 6 +- www/rubygem-actionpack61/Makefile | 2 +- www/rubygem-actionpack61/distinfo | 6 +- www/rubygem-actionpack70/Makefile | 2 +- www/rubygem-actionpack70/distinfo | 6 +- www/rubygem-cgi/Makefile | 4 +- www/rubygem-cgi/distinfo | 6 +- 
www/rubygem-jekyll/Makefile | 10 +- www/rubygem-jekyll/distinfo | 6 +- www/rubygem-jsbundling-rails/Makefile | 2 +- www/rubygem-jsbundling-rails/distinfo | 6 +- www/rubygem-rails52/Makefile | 2 +- www/rubygem-rails52/distinfo | 6 +- www/rubygem-rails60/Makefile | 2 +- www/rubygem-rails60/distinfo | 6 +- www/rubygem-rails61-node16/Makefile | 2 +- www/rubygem-rails61-node16/distinfo | 6 +- www/rubygem-rails61/Makefile | 2 +- www/rubygem-rails61/distinfo | 6 +- www/rubygem-rails70/Makefile | 2 +- www/rubygem-rails70/distinfo | 6 +- www/rubygem-railties52/Makefile | 2 +- www/rubygem-railties52/distinfo | 6 +- www/rubygem-railties60/Makefile | 2 +- www/rubygem-railties60/distinfo | 6 +- www/rubygem-railties61/Makefile | 2 +- www/rubygem-railties61/distinfo | 6 +- www/rubygem-railties70/Makefile | 2 +- www/rubygem-railties70/distinfo | 6 +- www/sitecopy/Makefile | 2 +- www/threejs/Makefile | 2 +- www/threejs/distinfo | 6 +- www/threejs/pkg-plist | 2 + www/tidy-html5/Makefile | 1 + www/trafficserver/Makefile | 1 + www/varnish4/Makefile | 2 +- www/varnish6/Makefile | 2 +- www/varnish7/Makefile | 57 + www/varnish7/distinfo | 3 + www/varnish7/files/no-inet6.patch | 8 + www/varnish7/files/varnishd.in | 125 + www/varnish7/files/varnishlog.in | 62 + www/varnish7/files/varnishncsa.in | 77 + www/varnish7/pkg-descr | 12 + www/varnish7/pkg-message | 18 + www/varnish7/pkg-plist | 136 + www/vger/Makefile | 2 +- www/vger/distinfo | 6 +- www/webkit2-gtk3/Makefile | 1 + x11-clocks/cairo-clock/Makefile | 2 +- x11-fm/Makefile | 1 + x11-fm/caja/Makefile | 2 +- x11-fm/fsv2/Makefile | 2 +- x11-fm/librfm/Makefile | 2 +- x11-fm/nautilus-python/Makefile | 1 + x11-fm/nemo/Makefile | 2 +- x11-fm/polo/Makefile | 32 + x11-fm/polo/distinfo | 3 + .../polo/files/patch-Gtk_ProgressPanel.vala | 11 + x11-fm/polo/files/patch-Gtk_TermBox.vala | 11 + .../polo/files/patch-Utility_AsyncTask.vala | 11 + .../polo/files/patch-Utility_MediaFile.vala | 11 + x11-fm/polo/files/patch-Utility_SysInfo.vala | 32 + 
.../files/patch-Utility_TeeJee.Process.vala | 29 + x11-fm/polo/files/patch-makefile | 32 + ...are_polo_files_gtk-theme_install-gtk-theme | 17 + x11-fm/polo/pkg-descr | 7 + x11-fm/polo/pkg-plist | 847 ++++ x11-fm/rodent/Makefile | 2 +- x11-fm/rox-filer/Makefile | 2 +- x11-fm/sushi/Makefile | 1 + x11-fonts/font-manager/Makefile | 1 + x11-fonts/py-ufoprocessor/Makefile | 3 +- x11-themes/adapta-backgrounds/Makefile | 1 + x11-themes/adapta-gtk-theme/Makefile | 2 +- x11-themes/plata-theme/Makefile | 1 + x11-toolkits/copperspice/Makefile | 2 +- x11-toolkits/gdl/Makefile | 1 + x11-toolkits/gnocl/Makefile | 2 +- x11-toolkits/gstreamer1-plugins-gtk/Makefile | 2 +- .../gstreamer1-plugins-pango/Makefile | 2 +- x11-toolkits/gtk-sharp20/Makefile | 2 +- x11-toolkits/gtkmathview/Makefile | 2 +- x11-toolkits/gtkmm24/Makefile | 2 +- x11-toolkits/gtkmm30/Makefile | 2 +- x11-toolkits/gtksourceview2/Makefile | 2 +- x11-toolkits/gtksourceview3/Makefile | 1 + x11-toolkits/gtksourceview4/Makefile | 1 + x11-toolkits/gtksourceview5/Makefile | 1 + x11-toolkits/gtksourceviewmm3/Makefile | 1 + x11-toolkits/guile-gnome-platform/Makefile | 2 +- x11-toolkits/libadwaita/Makefile | 1 - x11-toolkits/libsexy/Makefile | 2 +- x11-toolkits/ocaml-lablgtk2/Makefile | 2 +- x11-toolkits/p5-Glade2/Makefile | 2 +- x11-toolkits/p5-Gtk2-GladeXML/Makefile | 1 + x11-toolkits/pangomm/Makefile | 2 +- x11-toolkits/py-qt5-chart/Makefile | 1 + x11-toolkits/rubygem-gtksourceview3/Makefile | 1 + x11-toolkits/rubygem-gtksourceview4/Makefile | 1 + x11-toolkits/tepl/Makefile | 2 +- x11-toolkits/tepl6/Makefile | 2 +- x11-wm/compiz-plugins-extra/Makefile | 2 +- x11-wm/compiz-plugins-main/Makefile | 2 +- x11-wm/compiz-plugins-unsupported/Makefile | 2 +- x11-wm/compiz/Makefile | 2 +- x11-wm/compizconfig-backend-gconf/Makefile | 2 +- x11-wm/labwc/Makefile | 1 + x11-wm/libcompizconfig/Makefile | 1 + x11-wm/lxappearance-obconf/Makefile | 2 +- x11-wm/lxsession/Makefile | 1 + x11-wm/obconf-qt/Makefile | 1 + x11-wm/obconf/Makefile | 
2 +- x11-wm/openbox/Makefile | 2 +- x11-wm/phoc/Makefile | 4 +- x11-wm/phoc/Makefile.wlroots | 2 +- x11-wm/phoc/distinfo | 10 +- x11-wm/qtile/Makefile | 2 +- x11/alltray/Makefile | 2 +- x11/apwal/Makefile | 2 +- x11/cinnamon-screensaver/Makefile | 1 + x11/cinnamon/Makefile | 2 +- x11/gdm/Makefile | 1 + x11/gdm/files/patch-data_Init.in | 66 - x11/gdm/files/patch-data_PostSession.in | 14 +- x11/gdm/files/patch-data_PreSession.in | 14 - x11/gnome-shell/Makefile | 1 + .../files/patch-js_ui_environment.js | 10 + x11/gnome-terminal/Makefile | 1 + x11/gnome/Makefile | 2 +- x11/gstreamer1-plugins-x/Makefile | 2 +- x11/gstreamer1-plugins-ximagesrc/Makefile | 2 +- x11/gsynaptics/Makefile | 2 +- x11/jgmenu/Makefile | 1 + x11/keyboardcast/Makefile | 2 +- x11/libxkbcommon/Makefile | 1 + x11/libxklavier/Makefile | 2 +- x11/lxpanel/Makefile | 1 + x11/mate-applets/Makefile | 1 + x11/mate-session-manager/Makefile | 1 + x11/nwg-launchers/Makefile | 1 + x11/pipeglade/Makefile | 1 + x11/plank/Makefile | 1 + x11/polybar/Makefile | 26 +- x11/polybar/distinfo | 6 +- x11/polybar/files/patch-CMakeLists.txt | 11 + x11/polybar/files/patch-src_utils_file.cpp | 11 + x11/polybar/pkg-plist | 3 +- x11/py-pyscreenshot/files/patch-setup.py | 12 + x11/roxterm/Makefile | 1 + x11/simdock/Makefile | 2 +- x11/swayr/Makefile | 10 +- x11/swayr/distinfo | 22 +- x11/swaysettings/Makefile | 1 + x11/waybar/Makefile | 1 + x11/wbar/Makefile | 2 +- x11/wcm/Makefile | 1 + x11/wf-shell/Makefile | 2 +- x11/wl-mirror/Makefile | 6 +- x11/wl-mirror/distinfo | 10 +- x11/workrave/Makefile | 1 + x11/xapp/Makefile | 1 + x11/xfce4-screenshooter-plugin/Makefile | 1 + x11/xfce4-terminal/Makefile | 2 +- x11/xkeyboard-config/Makefile | 1 + x11/xscreensaver/Makefile | 2 +- x11/xsnow/Makefile | 1 + x11/yelp/Makefile | 1 + 2055 files changed, 52888 insertions(+), 4136 deletions(-) rename archivers/py-bup/files/{patch-t_test-sparse-files.sh => patch-test_ext_test-sparse-files} (69%) create mode 100644 
astro/py-pysofa/files/patch-2to3 create mode 100644 astro/py-ro/files/patch-2to3 create mode 100644 audio/icecast/files/patch-configure create mode 100644 audio/py-apetag/files/patch-2to3 create mode 100644 audio/py-mpd/files/patch-2to3 create mode 100644 benchmarks/py-naarad/files/patch-2to3 create mode 100644 biology/checkm/files/patch-2to3 create mode 100644 biology/groopm/files/patch-2to3 create mode 100644 biology/py-crossmap/files/patch-2to3 create mode 100644 biology/py-ont-fast5-api/files/patch-setup.py create mode 100644 biology/python-nexus/files/patch-2to3 create mode 100644 databases/buzhug/files/patch-2to3 create mode 100644 databases/py-Elixir/files/patch-2to3 create mode 100644 databases/py-dbf/files/patch-2to3 create mode 100644 databases/py-motor/files/patch-asyncio create mode 100644 databases/py-sqlobject/files/patch-2to3 create mode 100644 databases/py-zodbpickle/files/patch-2to3 create mode 100644 deskutils/py-pystash/files/patch-2to3 create mode 100644 devel/codeville/files/patch-indent create mode 100644 devel/libpafe-ruby/files/patch-extconf.rb create mode 100644 devel/libsigrokdecode/files/patch-configure create mode 100644 devel/ocaml-sdl/files/patch-src_Makefile create mode 100644 devel/ocaml-sdl/files/patch-src_sdlmouse.ml create mode 100644 devel/py-DateTime/files/patch-2to3 create mode 100644 devel/py-Products.ExternalEditor/files/patch-2to3 create mode 100644 devel/py-aiosignal/Makefile create mode 100644 devel/py-aiosignal/distinfo create mode 100644 devel/py-aiosignal/pkg-descr create mode 100644 devel/py-anyjson/files/patch-2to3 create mode 100644 devel/py-apache_conf_parser/files/patch-indent delete mode 100644 devel/py-asyncio/Makefile delete mode 100644 devel/py-asyncio/distinfo delete mode 100644 devel/py-asyncio/pkg-descr create mode 100644 devel/py-behave/files/patch-setup.py create mode 100644 devel/py-cdg/files/patch-2to3 create mode 100644 devel/py-daemon-runner/files/patch-2to3 create mode 100644 
devel/py-dal/files/patch-2to3 create mode 100644 devel/py-datadog/files/setup.py delete mode 100644 devel/py-distributed/files/patch-requirements.txt create mode 100644 devel/py-epc/Makefile create mode 100644 devel/py-epc/distinfo create mode 100644 devel/py-epc/pkg-descr create mode 100644 devel/py-extremes/files/patch-2to3 create mode 100644 devel/py-grizzled/files/patch-2to3 create mode 100644 devel/py-hash_ring/files/patch-2to3 create mode 100644 devel/py-minimongo/files/patch-2to3 create mode 100644 devel/py-mongokit/files/patch-2to3 create mode 100644 devel/py-notebook-shim/Makefile create mode 100644 devel/py-notebook-shim/distinfo create mode 100644 devel/py-notebook-shim/pkg-descr create mode 100644 devel/py-omnijson/files/patch-2to3 create mode 100644 devel/py-optik/files/patch-2to3 delete mode 100644 devel/py-os-brick/files/patch-requirements.txt create mode 100644 devel/py-pastel/files/patch-setup.py create mode 100644 devel/py-path/files/setup.py create mode 100644 devel/py-plex/files/patch-2to3 create mode 100644 devel/py-pycalendar/files/patch-2to3 create mode 100644 devel/py-pydevd/files/patch-2to3 create mode 100644 devel/py-pygpx/files/patch-2to3 create mode 100644 devel/py-pyrepl/files/patch-2to3 create mode 100644 devel/py-pytest-subtests/Makefile create mode 100644 devel/py-pytest-subtests/distinfo create mode 100644 devel/py-pytest-subtests/pkg-descr create mode 100644 devel/py-python-application/files/patch-2to3 create mode 100644 devel/py-simpleparse/files/patch-setup.py create mode 100644 devel/py-simpletal/files/patch-2to3 create mode 100644 devel/py-stsci.distutils/files/patch-2to3 create mode 100644 devel/py-testoob/files/patch-2to3 create mode 100644 devel/py-urlimport/files/patch-2to3 create mode 100644 devel/py-yapps2/files/patch-2to3 create mode 100644 devel/py-z3c.autoinclude/files/patch-2to3 create mode 100644 devel/py-zope.cachedescriptors/files/patch-2to3 create mode 100644 devel/py-zope.contenttype/files/patch-2to3 create mode 
100644 devel/py-zope.datetime/files/patch-2to3 create mode 100644 devel/py-zope.deferredimport/files/patch-2to3 create mode 100644 devel/py-zope.generations/files/patch-2to3 create mode 100644 devel/py-zope.i18n/files/patch-2to3 create mode 100644 devel/py-zope.sequencesort/files/patch-2to3 create mode 100644 devel/py-zope.size/files/patch-2to3 create mode 100644 devel/rubygem-aws-sdk-keyspaces/Makefile create mode 100644 devel/rubygem-aws-sdk-keyspaces/distinfo create mode 100644 devel/rubygem-aws-sdk-keyspaces/pkg-descr create mode 100644 devel/rubygem-error_highlight/Makefile create mode 100644 devel/rubygem-error_highlight/distinfo create mode 100644 devel/rubygem-error_highlight/pkg-descr create mode 100644 devel/rubygem-que-scheduler/Makefile create mode 100644 devel/rubygem-que-scheduler/distinfo create mode 100644 devel/rubygem-que-scheduler/pkg-descr create mode 100644 devel/rubygem-sentry-rails/Makefile create mode 100644 devel/rubygem-sentry-rails/distinfo create mode 100644 devel/rubygem-sentry-rails/pkg-descr create mode 100644 devel/rubygem-sentry-ruby-core/Makefile create mode 100644 devel/rubygem-sentry-ruby-core/distinfo create mode 100644 devel/rubygem-sentry-ruby-core/pkg-descr create mode 100644 devel/rubygem-sentry-ruby/Makefile create mode 100644 devel/rubygem-sentry-ruby/distinfo create mode 100644 devel/rubygem-sentry-ruby/pkg-descr create mode 100644 devel/rubygem-sentry-sidekiq/Makefile create mode 100644 devel/rubygem-sentry-sidekiq/distinfo create mode 100644 devel/rubygem-sentry-sidekiq/pkg-descr create mode 100644 devel/rubygem-view_component-rails61/Makefile create mode 100644 devel/rubygem-view_component-rails61/distinfo create mode 100644 devel/rubygem-view_component-rails61/pkg-descr create mode 100644 devel/rubygem-xdg3/files/patch-gemspec create mode 100644 devel/rubygem-xdg4/files/patch-gemspec delete mode 100644 editors/ghostwriter/files/patch-src_MarkdownEditor.cpp create mode 100644 finance/py-python-obelisk/files/patch-2to3 
create mode 100644 games/0ad/files/setuptools.diff create mode 100644 games/yquake2/Makefile create mode 100644 games/yquake2/distinfo create mode 100644 games/yquake2/files/patch-Makefile create mode 100644 games/yquake2/files/patch-src_client_vid_vid.c create mode 100644 games/yquake2/files/patch-src_common_filesystem.c create mode 100644 games/yquake2/pkg-descr create mode 100644 games/yquake2/pkg-plist create mode 100644 graphics/gdk-pixbuf2/files/gdk-pixbuf-query-loaders.ucl.in create mode 100644 graphics/opencollada/files/patch-clang13 create mode 100644 graphics/piddle/files/patch-2to3 create mode 100644 graphics/py-beziers/files/patch-setup.py create mode 100644 graphics/py-pycha/files/patch-2to3 create mode 100644 lang/crystal/files/extra-patch-src_openssl_lib__crypto.cr create mode 100644 lang/crystal/files/extra-patch-src_openssl_lib__ssl.cr create mode 100644 lang/mono/files/extra-patch-aarch64-race-workaround create mode 100644 lang/mono5.10/files/extra-patch-aarch64-race-workaround create mode 100644 lang/php80/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h create mode 100644 lang/php81/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h create mode 100644 mail/dovecot-fts-xapian/files/patch-src_fts-backend-xapian.cpp create mode 100644 mail/py-Products.SecureMailHost/files/patch-2to3 delete mode 100644 mail/rubygem-mail/files/patch-mail.gemspec create mode 100644 math/py-fraction/Makefile create mode 100644 math/py-fraction/distinfo create mode 100644 math/py-fraction/pkg-descr create mode 100644 math/py-iohexperimenter/files/patch-setup.py create mode 100644 math/py-moarchiving/Makefile create mode 100644 math/py-moarchiving/distinfo create mode 100644 math/py-moarchiving/pkg-descr delete mode 100644 math/py-mutatormath/Makefile delete mode 100644 math/py-mutatormath/distinfo delete mode 100644 math/py-mutatormath/pkg-descr create mode 100644 math/py-pdal/files/patch-2to3 create mode 100644 math/py-pybloom/files/patch-2to3 create 
mode 100644 math/py-svgmath/files/patch-2to3 create mode 100644 misc/py-kartograph/files/patch-2to3 create mode 100644 misc/py-onnx-tf/files/patch-setup.py create mode 100644 misc/py-xgboost/files/patch-2to3 create mode 100644 misc/shared-mime-info/files/patch-data_freedesktop.org.xml.in delete mode 100644 misc/shared-mime-info/files/patch-data_freedesktop__generate.sh create mode 100644 misc/shared-mime-info/files/patch-meson.build create mode 100644 multimedia/ffmpeg/files/patch-dav1d delete mode 100644 multimedia/libmediainfo/files/patch-Makefile.am create mode 100644 multimedia/libmediainfo/files/patch-Makefile.in create mode 100644 net-im/py-punjab/files/patch-2to3 create mode 100644 net-p2p/py-vertex/files/patch-2to3 create mode 100644 net/py-amqplib/files/patch-2to3 create mode 100644 net/py-matrix-synapse-ldap3/files/setup.py create mode 100644 net/py-stomp.py/files/patch-2to3 create mode 100644 net/py-tofu/files/patch-2to3 create mode 100644 print/py-pdf/files/patch-2to3 create mode 100644 print/py-pypdf3/Makefile create mode 100644 print/py-pypdf3/distinfo create mode 100644 print/py-pypdf3/pkg-descr delete mode 100644 print/py-uharfbuzz/files/patch-harfbuzz4 create mode 100644 science/py-oddt/files/patch-setup.py create mode 100644 science/py-paida/files/patch-2to3 create mode 100644 science/py-qspin/files/patch-2to3 create mode 100644 science/py-segyio/files/patch-2to3 create mode 100644 security/belier/files/patch-2to3 create mode 100644 security/hashcat/files/patch-deps_LZMA-SDK_C_CpuArch.c create mode 100644 security/py-RestrictedPython/files/patch-2to3 create mode 100644 security/py-cerealizer/files/patch-2to3 create mode 100644 security/py-gnutls/files/patch-2to3 create mode 100644 security/py-pyaxo/files/patch-2to3 create mode 100644 security/py-pysodium/files/patch-setup.py create mode 100644 security/py-python-openid/files/patch-2to3 create mode 100644 security/py-tlslite-ng/Makefile create mode 100644 security/py-tlslite-ng/distinfo create mode 
100644 security/py-tlslite-ng/pkg-descr create mode 100644 security/py-tlslite/files/patch-async create mode 100644 security/py-zope.password/files/patch-2to3 create mode 100644 sysutils/gnome-control-center/files/patch-panels_user-accounts_cc-realm-manager.c create mode 100644 sysutils/py-drmaa/files/patch-2to3 create mode 100644 sysutils/py-hared/files/patch-2to3 create mode 100644 sysutils/py-hared/files/patch-setup.py create mode 100644 sysutils/py-mitogen/files/patch-2to3 create mode 100644 sysutils/py-power/files/patch-2to3 create mode 100644 sysutils/upower/files/patch-meson.build create mode 100644 textproc/py-cssselect2/files/setup.py create mode 100644 textproc/py-ini2toml/Makefile create mode 100644 textproc/py-ini2toml/distinfo create mode 100644 textproc/py-ini2toml/pkg-descr delete mode 100644 textproc/py-markdown/files/patch-setup.py create mode 100644 textproc/py-pybtex/files/patch-setup.py create mode 100644 textproc/py-reverend/files/patch-2to3 delete mode 100644 textproc/py-sphinx-tabs/files/patch-setup.py create mode 100644 textproc/py-stemming/files/patch-2to3 create mode 100644 textproc/py-wordcloud/files/patch-wordcloud_query__integral__image.c create mode 100644 textproc/py-wordnet/files/concordance.py create mode 100644 textproc/py-wordnet/files/patch-2to3 create mode 100644 textproc/py-zope.structuredtext/files/patch-2to3 create mode 100644 textproc/py-zope.tal/files/patch-2to3 create mode 100644 textproc/py-zpt/files/patch-2to3 create mode 100644 www/py-WebError/files/patch-2to3 create mode 100644 www/py-aioh2/files/patch-aioh2-helper.py create mode 100644 www/py-aioh2/files/patch-aioh2-protocol.py create mode 100644 www/py-django-hijack/files/patch-setup.py create mode 100644 www/py-dtflickr/files/patch-2to3 create mode 100644 www/py-flask-caching/Makefile create mode 100644 www/py-flask-caching/distinfo create mode 100644 www/py-flask-caching/pkg-descr create mode 100644 www/py-instabot/files/patch-setup.py create mode 100644 
www/py-jonpy/files/patch-2to3 create mode 100644 www/py-nevow/files/patch-2to3 create mode 100644 www/py-py-restclient/files/patch-2to3 create mode 100644 www/py-pysmartdl/files/patch-setup.py create mode 100644 www/py-restclient/files/patch-2to3 delete mode 100644 www/py-sentinelhub/files/patch-requirements.txt create mode 100644 www/py-tornado4/files/patch-tornado-test-asyncio_test.py delete mode 100644 www/py-uvicorn/files/patch-setup.py create mode 100644 www/py-webunit/files/patch-2to3 create mode 100644 www/py-wikitools/files/patch-2to3 create mode 100644 www/varnish7/Makefile create mode 100644 www/varnish7/distinfo create mode 100644 www/varnish7/files/no-inet6.patch create mode 100644 www/varnish7/files/varnishd.in create mode 100644 www/varnish7/files/varnishlog.in create mode 100644 www/varnish7/files/varnishncsa.in create mode 100644 www/varnish7/pkg-descr create mode 100644 www/varnish7/pkg-message create mode 100644 www/varnish7/pkg-plist create mode 100644 x11-fm/polo/Makefile create mode 100644 x11-fm/polo/distinfo create mode 100644 x11-fm/polo/files/patch-Gtk_ProgressPanel.vala create mode 100644 x11-fm/polo/files/patch-Gtk_TermBox.vala create mode 100644 x11-fm/polo/files/patch-Utility_AsyncTask.vala create mode 100644 x11-fm/polo/files/patch-Utility_MediaFile.vala create mode 100644 x11-fm/polo/files/patch-Utility_SysInfo.vala create mode 100644 x11-fm/polo/files/patch-Utility_TeeJee.Process.vala create mode 100644 x11-fm/polo/files/patch-makefile create mode 100644 x11-fm/polo/files/patch-share_polo_files_gtk-theme_install-gtk-theme create mode 100644 x11-fm/polo/pkg-descr create mode 100644 x11-fm/polo/pkg-plist delete mode 100644 x11/gdm/files/patch-data_Init.in delete mode 100644 x11/gdm/files/patch-data_PreSession.in create mode 100644 x11/gnome-shell/files/patch-js_ui_environment.js create mode 100644 x11/polybar/files/patch-CMakeLists.txt create mode 100644 x11/polybar/files/patch-src_utils_file.cpp create mode 100644 
x11/py-pyscreenshot/files/patch-setup.py diff --git a/accessibility/caribou/Makefile b/accessibility/caribou/Makefile index df45e65ff4d..283ee8d504d 100644 --- a/accessibility/caribou/Makefile +++ b/accessibility/caribou/Makefile @@ -2,7 +2,7 @@ PORTNAME= caribou PORTVERSION= 0.4.21 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= accessibility gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/archivers/file-roller/Makefile b/archivers/file-roller/Makefile index 9d560030cf4..6b7f53b9256 100644 --- a/archivers/file-roller/Makefile +++ b/archivers/file-roller/Makefile @@ -2,7 +2,7 @@ PORTNAME= file-roller PORTVERSION= 3.40.0 -PORTREVISION= 1 +PORTREVISION= 2 PORTEPOCH= 1 CATEGORIES= archivers gnome MASTER_SITES= GNOME diff --git a/archivers/py-bup/Makefile b/archivers/py-bup/Makefile index c1c04d0cbdf..d1ebf231faa 100644 --- a/archivers/py-bup/Makefile +++ b/archivers/py-bup/Makefile @@ -1,7 +1,7 @@ # Created by: José García Juanino PORTNAME= bup -PORTVERSION= 0.31 +PORTVERSION= 0.32 CATEGORIES= archivers python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -17,9 +17,11 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pylibacl>0:security/py-pylibacl@${PY_FLAVOR} par2cmdline>0.4:archivers/par2cmdline BUILD_DEPENDS= bash:shells/bash \ git>1.5.6:devel/git +RUN_DEPENDS= bash:shells/bash -USES= python:3.6+ gmake +USES= gmake python:3.6+ shebangfix USE_PYTHON= autoplist +SHEBANG_FILES= test/ext/test-sparse-files lib/cmd/import-rdiff-backup-cmd.sh PLIST_SUB= MANDIR=${PREFIX}/share/man diff --git a/archivers/py-bup/distinfo b/archivers/py-bup/distinfo index 5274ed8ba9a..08423a93dfb 100644 --- a/archivers/py-bup/distinfo +++ b/archivers/py-bup/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1608064432 -SHA256 (bup-bup-0.31_GH0.tar.gz) = 2f54351aed653b4b9567d3a534af598a5bc63b32efd7cc593bcecac3b89e16d1 -SIZE (bup-bup-0.31_GH0.tar.gz) = 454636 +TIMESTAMP = 1646238825 +SHA256 (bup-bup-0.32_GH0.tar.gz) = a894cfa96c44b9ef48003b2c2104dc5fa6361dd2f4d519261a93178984a51259 +SIZE 
(bup-bup-0.32_GH0.tar.gz) = 448487 diff --git a/archivers/py-bup/files/patch-Makefile b/archivers/py-bup/files/patch-Makefile index 83685eaab39..a06868c6611 100644 --- a/archivers/py-bup/files/patch-Makefile +++ b/archivers/py-bup/files/patch-Makefile @@ -1,11 +1,11 @@ ---- Makefile.orig 2020-12-15 12:37:40.871053000 -0800 -+++ Makefile 2020-12-15 12:38:46.984653000 -0800 -@@ -28,7 +28,7 @@ +--- Makefile.orig 2021-01-09 22:11:10 UTC ++++ Makefile +@@ -28,7 +28,7 @@ current_sampledata := test/sampledata/var/rev/v$(sampl os := $(shell ($(pf); uname | sed 's/[-_].*//') $(isok)) os := $(call shout,$(os),Unable to determine OS) --CFLAGS := -Wall -Wformat=2 -O2 -Werror -Wno-unknown-pragmas $(CFLAGS) -+CFLAGS := -Wall -O2 -Wno-unknown-pragmas $(PYINCLUDE) $(CFLAGS) +-CFLAGS := -O2 -Wall -Werror -Wformat=2 $(CFLAGS) ++CFLAGS := -Wall $(CFLAGS) + CFLAGS := -Wno-unknown-pragmas -Wsign-compare $(CFLAGS) CFLAGS := -D_FILE_OFFSET_BITS=64 $(PYINCLUDE) $(CFLAGS) SOEXT:=.so - diff --git a/archivers/py-bup/files/patch-config_configure b/archivers/py-bup/files/patch-config_configure index e0db82ed7f2..71df4407132 100644 --- a/archivers/py-bup/files/patch-config_configure +++ b/archivers/py-bup/files/patch-config_configure @@ -1,13 +1,10 @@ ---- config/configure.orig 2020-12-15 16:52:00.017999000 -0800 -+++ config/configure 2020-12-15 16:53:04.758080000 -0800 -@@ -66,6 +66,10 @@ +--- config/configure.orig 2021-01-09 22:11:10 UTC ++++ config/configure +@@ -66,6 +66,7 @@ expr "$MAKE_VERSION" '>=' '3.81' || AC_FAIL "ERROR: $M AC_SUB bup_make "$MAKE" bup_python="$(type -p "$PYTHON")" +test -z "$bup_python" && bup_python="$(bup_find_prog python3.9 '')" -+test -z "$bup_python" && bup_python="$(bup_find_prog python3.8 '')" -+test -z "$bup_python" && bup_python="$(bup_find_prog python3.7 '')" -+test -z "$bup_python" && bup_python="$(bup_find_prog python3.6 '')" - test -z "$bup_python" && bup_python="$(bup_find_prog python2.7 '')" - test -z "$bup_python" && bup_python="$(bup_find_prog 
python2.6 '')" - test -z "$bup_python" && bup_python="$(bup_find_prog python2 '')" + test -z "$bup_python" && bup_python="$(bup_find_prog python3.8 '')" + test -z "$bup_python" && bup_python="$(bup_find_prog python3.7 '')" + test -z "$bup_python" && bup_python="$(bup_find_prog python3.6 '')" diff --git a/archivers/py-bup/files/patch-t_test-sparse-files.sh b/archivers/py-bup/files/patch-test_ext_test-sparse-files similarity index 69% rename from archivers/py-bup/files/patch-t_test-sparse-files.sh rename to archivers/py-bup/files/patch-test_ext_test-sparse-files index 3df7cb54893..08f6217a45d 100644 --- a/archivers/py-bup/files/patch-t_test-sparse-files.sh +++ b/archivers/py-bup/files/patch-test_ext_test-sparse-files @@ -1,6 +1,6 @@ ---- t/test-sparse-files.sh.orig 2020-12-15 12:42:46.593767000 -0800 -+++ t/test-sparse-files.sh 2020-12-15 12:43:10.570419000 -0800 -@@ -31,6 +31,12 @@ +--- test/ext/test-sparse-files.orig 2021-01-09 22:11:10 UTC ++++ test/ext/test-sparse-files +@@ -31,6 +31,12 @@ if [ "$probe_size" -ge "$((data_size / 1024))" ]; then exit 0 fi diff --git a/archivers/py-zopfli/Makefile b/archivers/py-zopfli/Makefile index a610d671d2a..be1fd65fe33 100644 --- a/archivers/py-zopfli/Makefile +++ b/archivers/py-zopfli/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= zopfli -PORTVERSION= 0.1.9 +PORTVERSION= 0.2.1 CATEGORIES= archivers python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,11 +13,15 @@ LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/COPYING BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}setuptools_scm>=0:devel/py-setuptools_scm@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0,1:devel/py-pytest@${PY_FLAVOR} -USES= python:3.6+ zip +USES= python:3.7+ zip USE_PYTHON= autoplist concurrent distutils post-install: ${FIND} ${STAGEDIR}${PYTHON_SITELIBDIR} -name '*.so' -exec ${STRIP_CMD} {} + +do-test: + cd ${WRKSRC} && ${SETENV} PYTHONPATH=${STAGEDIR}${PYTHONPREFIX_SITELIBDIR} ${PYTHON_CMD} -m pytest -rs -v + .include 
diff --git a/archivers/py-zopfli/distinfo b/archivers/py-zopfli/distinfo index 31b8f76b478..31cbe26bb76 100644 --- a/archivers/py-zopfli/distinfo +++ b/archivers/py-zopfli/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1635181027 -SHA256 (zopfli-0.1.9.zip) = 78de3cc08a8efaa8013d61528907d91ac4d6cc014ffd8a41cc10ee75e9e60d7b -SIZE (zopfli-0.1.9.zip) = 79873 +TIMESTAMP = 1647264490 +SHA256 (zopfli-0.2.1.zip) = e5263d2806e2c1ccb23f52b2972a235d31d42f22f3fa3032cc9aded51e9bf2c6 +SIZE (zopfli-0.2.1.zip) = 205086 diff --git a/archivers/xar/Makefile b/archivers/xar/Makefile index 84ef0b003a1..ead25b18036 100644 --- a/archivers/xar/Makefile +++ b/archivers/xar/Makefile @@ -2,6 +2,7 @@ PORTNAME= xar PORTVERSION= 1.6.1 +PORTREVISION= 1 CATEGORIES= archivers MASTER_SITES= https://github.com/downloads/mackyle/xar/ \ GENTOO diff --git a/archivers/xarchiver/Makefile b/archivers/xarchiver/Makefile index 92e644bd9ef..42eabb01679 100644 --- a/archivers/xarchiver/Makefile +++ b/archivers/xarchiver/Makefile @@ -2,6 +2,7 @@ PORTNAME= xarchiver PORTVERSION= 0.5.4.17 +PORTREVISION= 1 CATEGORIES= archivers MAINTAINER= ports@FreeBSD.org diff --git a/astro/foxtrotgps/Makefile b/astro/foxtrotgps/Makefile index 478a8604ee2..6d8ffbc6565 100644 --- a/astro/foxtrotgps/Makefile +++ b/astro/foxtrotgps/Makefile @@ -2,7 +2,7 @@ PORTNAME= foxtrotgps PORTVERSION= 1.2.2 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= astro geography MASTER_SITES= http://www.foxtrotgps.org/releases/ diff --git a/astro/gpscorrelate/Makefile b/astro/gpscorrelate/Makefile index 0b7b12b934a..9d7f168c7d3 100644 --- a/astro/gpscorrelate/Makefile +++ b/astro/gpscorrelate/Makefile @@ -2,7 +2,7 @@ PORTNAME= gpscorrelate PORTVERSION= 1.6.1 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= astro geography MAINTAINER= ports@FreeBSD.org diff --git a/astro/kosmorro/Makefile b/astro/kosmorro/Makefile index d3b96c139c9..248f746154d 100644 --- a/astro/kosmorro/Makefile +++ b/astro/kosmorro/Makefile @@ -1,7 +1,7 @@ # Created by: thierry@pompo.net 
PORTNAME= kosmorro -PORTVERSION= 0.10.9 +PORTVERSION= 0.10.10 CATEGORIES= astro python MASTER_SITES= CHEESESHOP @@ -10,18 +10,22 @@ COMMENT= Ephemerides computation LICENSE= AGPLv3 -BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}tabulate>0:devel/py-tabulate@${PY_FLAVOR}\ +BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}Babel>0:devel/py-babel@${PY_FLAVOR}\ + ${PYTHON_PKGNAMEPREFIX}tabulate>0:devel/py-tabulate@${PY_FLAVOR}\ + ${PYTHON_PKGNAMEPREFIX}Babel>0:devel/py-babel@${PY_FLAVOR}\ ${PYTHON_PKGNAMEPREFIX}dateutil>0:devel/py-dateutil@${PY_FLAVOR}\ ${PYTHON_PKGNAMEPREFIX}termcolor>0:devel/py-termcolor@${PY_FLAVOR}\ - ${PYTHON_PKGNAMEPREFIX}kosmorrolib>0:astro/py-kosmorrolib@${PY_FLAVOR} -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}tabulate>0:devel/py-tabulate@${PY_FLAVOR}\ + ${PYTHON_PKGNAMEPREFIX}kosmorrolib>0:astro/py-kosmorrolib@${PY_FLAVOR}\ + ${PYTHON_PKGNAMEPREFIX}importlib-metadata>0:devel/py-importlib-metadata@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}Babel>0:devel/py-babel@${PY_FLAVOR}\ + ${PYTHON_PKGNAMEPREFIX}tabulate>0:devel/py-tabulate@${PY_FLAVOR}\ ${PYTHON_PKGNAMEPREFIX}dateutil>0:devel/py-dateutil@${PY_FLAVOR}\ ${PYTHON_PKGNAMEPREFIX}termcolor>0:devel/py-termcolor@${PY_FLAVOR}\ ${PYTHON_PKGNAMEPREFIX}kosmorrolib>0:astro/py-kosmorrolib@${PY_FLAVOR} #USE_GITHUB= yes -USES= python:3.7+ shebangfix +USES= gettext-runtime:run python:3.7+ shebangfix USE_PYTHON= distutils noflavors SHEBANG_FILES= ${PORTNAME} @@ -32,7 +36,4 @@ PDF_RUN_DEPENDS=pdflatex:print/tex-formats NO_ARCH= yes -post-install: - ${FIND} ${STAGEDIR}${PYTHON_SITELIBDIR}/_kosmorro/locales -name "*.mo" -delete - .include diff --git a/astro/kosmorro/distinfo b/astro/kosmorro/distinfo index 3202258231f..43ac0b5cd80 100644 --- a/astro/kosmorro/distinfo +++ b/astro/kosmorro/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642019829 -SHA256 (kosmorro-0.10.9.tar.gz) = c08a6ae055cf91b92d535910c33b3a34d65472f62486d726b53e1a4b85ac90ab -SIZE (kosmorro-0.10.9.tar.gz) = 518133 +TIMESTAMP = 1648234300 +SHA256 (kosmorro-0.10.10.tar.gz) 
= e9283750023446f2af54b2fbd60a7e0e82a01bfe48525b6040ee4e185fc6d091 +SIZE (kosmorro-0.10.10.tar.gz) = 513964 diff --git a/astro/kosmorro/pkg-plist b/astro/kosmorro/pkg-plist index c8ee4ae38ec..2d576512aab 100644 --- a/astro/kosmorro/pkg-plist +++ b/astro/kosmorro/pkg-plist @@ -1,72 +1,75 @@ bin/kosmorro -%%PYTHON_SITELIBDIR%%/_kosmorro/__init__.py -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/__version__%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/__version__%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/date%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/date%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/debug%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/debug%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/dumper%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/dumper%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/environment%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/environment%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/exceptions%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/exceptions%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/main%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__pycache__/main%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/__version__.py -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/first-quarter.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/full-moon.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/last-quarter.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/new-moon.png 
-%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/unknown.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/waning-crescent.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/waning-gibbous.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/waxing-crescent.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/png/waxing-gibbous.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/first-quarter.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/full-moon.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/last-quarter.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/new-moon.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/unknown.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/waning-crescent.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/waning-gibbous.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/waxing-crescent.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/moonphases/svg/waxing-gibbous.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/pdf/kosmorro.sty -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/pdf/template.tex -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/png/kosmorro-icon-white.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/png/kosmorro-icon.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/png/kosmorro-logo-white.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/png/kosmorro-logo.png -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/svg/kosmorro-icon-white.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/svg/kosmorro-icon.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/svg/kosmorro-logo-white.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/assets/svg/kosmorro-logo.svg -%%PYTHON_SITELIBDIR%%/_kosmorro/date.py -%%PYTHON_SITELIBDIR%%/_kosmorro/debug.py -%%PYTHON_SITELIBDIR%%/_kosmorro/dumper.py -%%PYTHON_SITELIBDIR%%/_kosmorro/environment.py -%%PYTHON_SITELIBDIR%%/_kosmorro/exceptions.py -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/__init__.py 
-%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/__pycache__/strings%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/__pycache__/strings%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/__pycache__/utils%%PYTHON_EXT_SUFFIX%%.opt-1.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/__pycache__/utils%%PYTHON_EXT_SUFFIX%%.pyc -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/strings.py -%%PYTHON_SITELIBDIR%%/_kosmorro/i18n/utils.py -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/de/LC_MESSAGES/messages.po -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/en_XA/LC_MESSAGES/messages.po -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/es/LC_MESSAGES/messages.po -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/fr/LC_MESSAGES/messages.po -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/messages.pot -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/nb_NO/LC_MESSAGES/messages.po -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/nl/LC_MESSAGES/messages.po -%%PYTHON_SITELIBDIR%%/_kosmorro/locales/ru/LC_MESSAGES/messages.po -%%PYTHON_SITELIBDIR%%/_kosmorro/main.py -man/man1/kosmorro.1.gz -man/man7/kosmorro.7.gz +%%PYTHON_SITELIBDIR%%/kosmorro/__init__.py +%%PYTHON_SITELIBDIR%%/kosmorro/__main__.py +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/__main__%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/__main__%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/date%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/date%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/debug%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/debug%%PYTHON_EXT_SUFFIX%%.pyc 
+%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/dumper%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/dumper%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/environment%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/environment%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/exceptions%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/exceptions%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/utils%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/__pycache__/utils%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/first-quarter.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/full-moon.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/last-quarter.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/new-moon.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/unknown.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/waning-crescent.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/waning-gibbous.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/waxing-crescent.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/png/waxing-gibbous.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/first-quarter.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/full-moon.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/last-quarter.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/new-moon.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/unknown.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/waning-crescent.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/waning-gibbous.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/waxing-crescent.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/moonphases/svg/waxing-gibbous.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/pdf/kosmorro.sty 
+%%PYTHON_SITELIBDIR%%/kosmorro/assets/pdf/template.tex +%%PYTHON_SITELIBDIR%%/kosmorro/assets/png/kosmorro-icon-white.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/png/kosmorro-icon.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/png/kosmorro-logo-white.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/png/kosmorro-logo.png +%%PYTHON_SITELIBDIR%%/kosmorro/assets/svg/kosmorro-icon-white.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/svg/kosmorro-icon.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/svg/kosmorro-logo-white.svg +%%PYTHON_SITELIBDIR%%/kosmorro/assets/svg/kosmorro-logo.svg +%%PYTHON_SITELIBDIR%%/kosmorro/date.py +%%PYTHON_SITELIBDIR%%/kosmorro/debug.py +%%PYTHON_SITELIBDIR%%/kosmorro/dumper.py +%%PYTHON_SITELIBDIR%%/kosmorro/environment.py +%%PYTHON_SITELIBDIR%%/kosmorro/exceptions.py +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/__init__.py +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/__pycache__/__init__%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/__pycache__/strings%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/__pycache__/strings%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/__pycache__/utils%%PYTHON_EXT_SUFFIX%%.opt-1.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/__pycache__/utils%%PYTHON_EXT_SUFFIX%%.pyc +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/strings.py +%%PYTHON_SITELIBDIR%%/kosmorro/i18n/utils.py +%%PYTHON_SITELIBDIR%%/kosmorro/locales/de/LC_MESSAGES/messages.mo +%%PYTHON_SITELIBDIR%%/kosmorro/locales/de/LC_MESSAGES/messages.po +%%PYTHON_SITELIBDIR%%/kosmorro/locales/es/LC_MESSAGES/messages.mo +%%PYTHON_SITELIBDIR%%/kosmorro/locales/es/LC_MESSAGES/messages.po +%%PYTHON_SITELIBDIR%%/kosmorro/locales/fr/LC_MESSAGES/messages.mo +%%PYTHON_SITELIBDIR%%/kosmorro/locales/fr/LC_MESSAGES/messages.po +%%PYTHON_SITELIBDIR%%/kosmorro/locales/messages.pot +%%PYTHON_SITELIBDIR%%/kosmorro/locales/nb_NO/LC_MESSAGES/messages.mo 
+%%PYTHON_SITELIBDIR%%/kosmorro/locales/nb_NO/LC_MESSAGES/messages.po +%%PYTHON_SITELIBDIR%%/kosmorro/locales/nl/LC_MESSAGES/messages.mo +%%PYTHON_SITELIBDIR%%/kosmorro/locales/nl/LC_MESSAGES/messages.po +%%PYTHON_SITELIBDIR%%/kosmorro/locales/ru/LC_MESSAGES/messages.mo +%%PYTHON_SITELIBDIR%%/kosmorro/locales/ru/LC_MESSAGES/messages.po +%%PYTHON_SITELIBDIR%%/kosmorro/utils.py diff --git a/astro/py-kosmorrolib/Makefile b/astro/py-kosmorrolib/Makefile index 575e2134b75..03237589db7 100644 --- a/astro/py-kosmorrolib/Makefile +++ b/astro/py-kosmorrolib/Makefile @@ -1,7 +1,7 @@ # Created by: thierry@pompo.net PORTNAME= kosmorrolib -PORTVERSION= 1.0.5 +PORTVERSION= 1.0.6 CATEGORIES= astro python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/astro/py-kosmorrolib/distinfo b/astro/py-kosmorrolib/distinfo index d8c31a12af4..0de6261c642 100644 --- a/astro/py-kosmorrolib/distinfo +++ b/astro/py-kosmorrolib/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645555075 -SHA256 (kosmorrolib-1.0.5.tar.gz) = 1dd63aea8407164ba8ecabcf2760f9c4c2a6fb3b13f39bc8b1aa100df60dc9bc -SIZE (kosmorrolib-1.0.5.tar.gz) = 24623 +TIMESTAMP = 1648234102 +SHA256 (kosmorrolib-1.0.6.tar.gz) = 8c77f40b346d183147cb3e927b3ea5c7d4d7067ce7e49edb73821566f74d5e51 +SIZE (kosmorrolib-1.0.6.tar.gz) = 24986 diff --git a/astro/py-pysofa/files/patch-2to3 b/astro/py-pysofa/files/patch-2to3 new file mode 100644 index 00000000000..837edb6e8fc --- /dev/null +++ b/astro/py-pysofa/files/patch-2to3 @@ -0,0 +1,182 @@ +--- pysofa/pysofa_ctypes.py.orig 2011-01-30 09:53:37 UTC ++++ pysofa/pysofa_ctypes.py +@@ -156,7 +156,7 @@ def af2a(s, ideg, iamin, asec): + .. seealso:: |MANUAL| page 21 + """ + +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + rad = c_double() + s = _sofa.iauAf2a(str(s), ideg, iamin, asec, byref(rad)) +@@ -1011,7 +1011,7 @@ def dtf2d(scale, iy, im, id, ihr, imn, sec): + .. 
seealso:: |MANUAL| page 64 + """ + +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + d1 = c_double() + d2 = c_double() +@@ -4236,7 +4236,7 @@ def taitt(tai1, tai2): + + .. seealso:: |MANUAL| page 224 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tt1 = c_double() + tt2 = c_double() +@@ -4269,7 +4269,7 @@ def taiut1(tai1, tai2, dta): + + .. seealso:: |MANUAL| page 225 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + ut11 = c_double() + ut12 = c_double() +@@ -4309,7 +4309,7 @@ def taiutc(tai1, tai2): + + .. seealso:: |MANUAL| page 226 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + utc1 = c_double() + utc2 = c_double() +@@ -4342,7 +4342,7 @@ def tcbtdb(tcb1, tcb2): + + .. seealso:: |MANUAL| page 227 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tdb1 = c_double() + tdb2 = c_double() +@@ -4371,7 +4371,7 @@ def tcgtt(tcg1, tcg2): + + .. seealso:: |MANUAL| page 228 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tt1 = c_double() + tt2 = c_double() +@@ -4400,7 +4400,7 @@ def tdbtcb(tdb1, tdb2): + + .. seealso:: |MANUAL| page 229 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tcb1 = c_double() + tcb2 = c_double() +@@ -4433,7 +4433,7 @@ def tdbtt(tdb1, tdb2, dtr): + + .. seealso:: |MANUAL| page 230 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tt1 = c_double() + tt2 = c_double() +@@ -4472,7 +4472,7 @@ def tf2a(s, ihour, imin, sec): + .. 
seealso:: |MANUAL| page 231 + """ + +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + rad = c_double() + s = _sofa.iauTf2a(str(s), ihour, imin, sec, byref(rad)) +@@ -4510,7 +4510,7 @@ def tf2d(s, ihour, imin, sec): + .. seealso:: |MANUAL| page 232 + """ + +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + days = c_double() + s = _sofa.iauTf2d(str(s), ihour, imin, sec, byref(days)) +@@ -4600,7 +4600,7 @@ def tttai(tt1, tt2): + + .. seealso:: |MANUAL| page 236 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tai1 = c_double() + tai2 = c_double() +@@ -4629,7 +4629,7 @@ def tttcg(tt1, tt2): + + .. seealso:: |MANUAL| page 237 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tcg1 = c_double() + tcg2 = c_double() +@@ -4662,7 +4662,7 @@ def tttdb(tt1, tt2, dtr): + + .. seealso:: |MANUAL| page 238 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tdb1 = c_double() + tdb2 = c_double() +@@ -4695,7 +4695,7 @@ def ttut1(tt1, tt2, dt): + + .. seealso:: |MANUAL| page 239 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + ut11 = c_double() + ut12 = c_double() +@@ -4728,7 +4728,7 @@ def ut1tai(ut11, ut12, dta): + + .. seealso:: |MANUAL| page 240 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tai1 = c_double() + tai2 = c_double() +@@ -4761,7 +4761,7 @@ def ut1tt(ut11, ut12, dt): + + .. seealso:: |MANUAL| page 241 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tt1 = c_double() + tt2 = c_double() +@@ -4805,7 +4805,7 @@ def ut1utc(ut11, ut12, dut1): + + .. 
seealso:: |MANUAL| page 242 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + utc1 = c_double() + utc2 = c_double() +@@ -4849,7 +4849,7 @@ def utctai(utc1, utc2): + + .. seealso:: |MANUAL| page 243 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + tai1 = c_double() + tai2 = c_double() +@@ -4897,7 +4897,7 @@ def utcut1(utc1, utc2, dut1): + + .. seealso:: |MANUAL| page 244 + """ +- if __sofa_version < (2010, 12, 01): ++ if __sofa_version < (2010, 12, 0o1): + raise NotImplementedError + ut11 = c_double() + ut12 = c_double() diff --git a/astro/py-ro/files/patch-2to3 b/astro/py-ro/files/patch-2to3 new file mode 100644 index 00000000000..f96bb60b9f4 --- /dev/null +++ b/astro/py-ro/files/patch-2to3 @@ -0,0 +1,191 @@ +--- python/RO/AddCallback.py.orig 2015-11-03 17:58:36 UTC ++++ python/RO/AddCallback.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, print_function ++ + """Mixing class(es) for adding callback capabilities. + + History: +@@ -145,7 +145,7 @@ class BaseMixin(object): + return + + if not callable(callFunc): +- raise ValueError, "callFunc %r is not callable" % (callFunc,) ++ raise ValueError("callFunc %r is not callable" % (callFunc,)) + + # add new function + if callFunc not in self._callbacks: +@@ -264,7 +264,7 @@ class TkButtonMixin(BaseMixin): + + if command is not None: + if not callable(command): +- raise ValueError, "command %r is not callable" % (command,) ++ raise ValueError("command %r is not callable" % (command,)) + def doCommand(wdg): + return command() + self.addCallback(doCommand) +--- python/RO/procFiles.py.orig 2015-09-24 22:49:08 UTC ++++ python/RO/procFiles.py +@@ -1,5 +1,5 @@ + #!/usr/bin/env python +-from __future__ import absolute_import, division, print_function ++ + """procFiles calls a user-supplied function "func" to process a set of files one by one. 
+ The processed data is typically concatenated into one output file, but this behavior + is controlled by outPath. +@@ -161,7 +161,7 @@ def procFiles ( + ): + # make sure func is callable + if not callable(func): +- raise RuntimeError, "supplied function is not callable" ++ raise RuntimeError("supplied function is not callable") + + # handle case of inPathList being a single string + inPathList = RO.SeqUtil.asSequence(inPathList) +@@ -212,7 +212,7 @@ def procFiles ( + sys.stdout = file(outPath, 'w') + except ImportError: + # unknown platform; use standard prompt +- outFile = raw_input( ++ outFile = input( + "output file relative to %r [stdout]: " % outDir) + + # generate outPath, and if it's a file, open it and redirect stdout +--- python/RO/Wdg/PythonWdg.py.orig 2015-09-24 22:59:33 UTC ++++ python/RO/Wdg/PythonWdg.py +@@ -1,5 +1,5 @@ + #!/usr/bin/env python +-from __future__ import division, print_function ++ + """An interactive Python session and simple script file editor/runner + that may be used from Tkinter scripts. Before running a script,x=Tk() is replaced with x=Toplevel() and mainloop() is eliminated. Hence some Tk scripts + may be safely run. Presumably there are limitations. I suspect that mucking about +@@ -41,13 +41,13 @@ __all__ = ['PythonWdg'] + + import os + import re +-import Tkinter +-import tkFileDialog ++import tkinter ++import tkinter.filedialog + import RO.CnvUtil + import RO.OS +-import Text ++from . import Text + +-class PythonWdg(Tkinter.Frame): ++class PythonWdg(tkinter.Frame): + """A frame containing text window into which you may enter Python code. 
+ + Inputs: +@@ -60,7 +60,7 @@ class PythonWdg(Tkinter.Frame): + filePath = None, + helpURL = None, + **kargs): +- Tkinter.Frame.__init__(self, master, **kargs) ++ tkinter.Frame.__init__(self, master, **kargs) + + self.master=master + self.filePath = filePath +@@ -71,39 +71,39 @@ class PythonWdg(Tkinter.Frame): + height = 10, + helpURL = helpURL + ) +- self.inputWdg.grid(row=0, column=0, sticky=Tkinter.NSEW) ++ self.inputWdg.grid(row=0, column=0, sticky=tkinter.NSEW) + self.inputWdg.bind("", self.run) + +- self.scroll = Tkinter.Scrollbar(self, command=self.inputWdg.yview) ++ self.scroll = tkinter.Scrollbar(self, command=self.inputWdg.yview) + self.inputWdg.configure(yscrollcommand=self.scroll.set) +- self.scroll.grid(row=0, column=1, sticky=Tkinter.NS) ++ self.scroll.grid(row=0, column=1, sticky=tkinter.NS) + + if self.filePath: + fd = RO.OS.openUniv(self.filePath) + try: +- self.inputWdg.delete(1.0, Tkinter.END) ++ self.inputWdg.delete(1.0, tkinter.END) + for line in fd.readlines(): +- self.inputWdg.insert(Tkinter.END, line) ++ self.inputWdg.insert(tkinter.END, line) + finally: + fd.close() + +- self.cmdbar = Tkinter.Frame(self, borderwidth=2, relief=Tkinter.SUNKEN) +- self.open = Tkinter.Button(self.cmdbar, text='Open...', command=self.open) +- self.open.pack(side=Tkinter.LEFT, expand=0, padx=3, pady=3) +- self.save = Tkinter.Button(self.cmdbar, text='Save...', command=self.save) +- self.save.pack(side=Tkinter.LEFT, expand=0, padx=3, pady=3) +- self.clr = Tkinter.Button(self.cmdbar, text='Clear', command=self.clr) +- self.clr.pack(side=Tkinter.LEFT, expand=0, padx=3, pady=3) +- self.run =Tkinter.Button(self.cmdbar, text='Run', command=self.run) +- self.run.pack(side=Tkinter.RIGHT, expand=0, padx=3, pady=3) +- self.cmdbar.grid(row=1, column=0, columnspan=2, sticky=Tkinter.EW) ++ self.cmdbar = tkinter.Frame(self, borderwidth=2, relief=tkinter.SUNKEN) ++ self.open = tkinter.Button(self.cmdbar, text='Open...', command=self.open) ++ self.open.pack(side=tkinter.LEFT, 
expand=0, padx=3, pady=3) ++ self.save = tkinter.Button(self.cmdbar, text='Save...', command=self.save) ++ self.save.pack(side=tkinter.LEFT, expand=0, padx=3, pady=3) ++ self.clr = tkinter.Button(self.cmdbar, text='Clear', command=self.clr) ++ self.clr.pack(side=tkinter.LEFT, expand=0, padx=3, pady=3) ++ self.run =tkinter.Button(self.cmdbar, text='Run', command=self.run) ++ self.run.pack(side=tkinter.RIGHT, expand=0, padx=3, pady=3) ++ self.cmdbar.grid(row=1, column=0, columnspan=2, sticky=tkinter.EW) + + self.grid_rowconfigure(0, weight=1) + self.grid_columnconfigure(0, weight=1) + self.inputWdg.focus_set() + + def run(self, evt=None, globs=None, locs=None): +- script = self.inputWdg.get(1.0, Tkinter.END) ++ script = self.inputWdg.get(1.0, tkinter.END) + + # replace x = Tk() with x = Toplevel() + tkPat = re.compile(r"^(.*=\s*)(:?ROStd)?Tk\(\)(.*)$", re.MULTILINE) +@@ -118,26 +118,26 @@ class PythonWdg(Tkinter.Frame): + globs = __main__.__dict__ + if locs is None: + locs = globs +- exec script in globs, locs ++ exec(script, globs, locs) + + def open(self): +- filePath = tkFileDialog.askopenfilename() ++ filePath = tkinter.filedialog.askopenfilename() + if not filePath: + return + filePath = RO.CnvUtil.asStr(filePath) +- top = Tkinter.Toplevel(self.master, ) ++ top = tkinter.Toplevel(self.master, ) + top.title(os.path.basename(filePath)) + frame = PythonWdg(top, filePath=filePath) +- frame.pack(expand=Tkinter.YES, fill=Tkinter.BOTH) ++ frame.pack(expand=tkinter.YES, fill=tkinter.BOTH) + + def save(self, forPrt=None): +- script = self.inputWdg.get(1.0, Tkinter.END) ++ script = self.inputWdg.get(1.0, tkinter.END) + if not script: + return + if forPrt: + filePath = 'prt.tmp' + else: +- filePath = tkFileDialog.asksaveasfilename(initialfile=self.filePath) ++ filePath = tkinter.filedialog.asksaveasfilename(initialfile=self.filePath) + if not filePath: + return + self.filePath = RO.CnvUtil.asStr(filePath) +@@ -152,10 +152,10 @@ class PythonWdg(Tkinter.Frame): + + + if 
__name__ == '__main__': +- root = Tkinter.Tk() ++ root = tkinter.Tk() + + testFrame = PythonWdg(root) + root.geometry("+0+450") +- testFrame.pack(expand=Tkinter.YES, fill=Tkinter.BOTH) ++ testFrame.pack(expand=tkinter.YES, fill=tkinter.BOTH) + + root.mainloop() diff --git a/audio/abgate-lv2/Makefile b/audio/abgate-lv2/Makefile index 396d756d9e7..6c9d7ec3eba 100644 --- a/audio/abgate-lv2/Makefile +++ b/audio/abgate-lv2/Makefile @@ -1,7 +1,7 @@ PORTNAME= abGate DISTVERSIONPREFIX= v DISTVERSION= 1.2.0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= audio PKGNAMESUFFIX= -lv2 diff --git a/audio/amarok/Makefile b/audio/amarok/Makefile index 08d6f108036..3c3e589275c 100644 --- a/audio/amarok/Makefile +++ b/audio/amarok/Makefile @@ -1,7 +1,7 @@ PORTNAME= amarok DISTVERSIONPREFIX= v DISTVERSION= 2.9.71 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= audio kde MAINTAINER= kde@FreeBSD.org diff --git a/audio/aqualung/Makefile b/audio/aqualung/Makefile index 350065ec923..05d325e3850 100644 --- a/audio/aqualung/Makefile +++ b/audio/aqualung/Makefile @@ -2,7 +2,7 @@ PORTNAME= aqualung PORTVERSION= 1.0 -PORTREVISION= 17 +PORTREVISION= 18 CATEGORIES= audio MASTER_SITES= SF diff --git a/audio/ardour6/Makefile b/audio/ardour6/Makefile index 3b2bbdce2bb..7c8f82af65a 100644 --- a/audio/ardour6/Makefile +++ b/audio/ardour6/Makefile @@ -2,7 +2,7 @@ PORTNAME= ardour6 PORTVERSION= 6.9.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio MASTER_SITES= LOCAL/nc/mirror/ \ https://community.ardour.org/srctar/ \ diff --git a/audio/ario/Makefile b/audio/ario/Makefile index e52c67603ea..8e88a1dd555 100644 --- a/audio/ario/Makefile +++ b/audio/ario/Makefile @@ -2,6 +2,7 @@ PORTNAME= ario PORTVERSION= 1.6 +PORTREVISION= 1 CATEGORIES= audio MASTER_SITES= SF/ario-player/ario-player/${PORTVERSION} diff --git a/audio/denemo/Makefile b/audio/denemo/Makefile index 728cbf6c563..823ec04bf1d 100644 --- a/audio/denemo/Makefile +++ b/audio/denemo/Makefile @@ -2,7 +2,7 @@ PORTNAME= denemo PORTVERSION= 2.0.6 
-PORTREVISION= 13 +PORTREVISION= 14 CATEGORIES= audio MASTER_SITES= GNU diff --git a/audio/easytag/Makefile b/audio/easytag/Makefile index 2e019c505ea..ff9e6e44fe9 100644 --- a/audio/easytag/Makefile +++ b/audio/easytag/Makefile @@ -2,7 +2,7 @@ PORTNAME= easytag PORTVERSION= 2.4.3 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= audio MASTER_SITES= GNOME diff --git a/audio/eq10q-lv2/Makefile b/audio/eq10q-lv2/Makefile index f035970dd4f..84e1f32066f 100644 --- a/audio/eq10q-lv2/Makefile +++ b/audio/eq10q-lv2/Makefile @@ -1,6 +1,6 @@ PORTNAME= eq10q DISTVERSION= 2.2 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= audio MASTER_SITES= SF/${PORTNAME} PKGNAMESUFFIX= -lv2 diff --git a/audio/ezstream/Makefile b/audio/ezstream/Makefile index 789935d1bbb..b12e254b40c 100644 --- a/audio/ezstream/Makefile +++ b/audio/ezstream/Makefile @@ -2,7 +2,7 @@ PORTNAME= ezstream PORTVERSION= 0.5.6 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= https://downloads.xiph.org/releases/ezstream/ \ https://ftp.osuosl.org/pub/xiph/releases/ezstream/ diff --git a/audio/fluidsynth/Makefile b/audio/fluidsynth/Makefile index 6b2f7375bb5..5f0a44de79a 100644 --- a/audio/fluidsynth/Makefile +++ b/audio/fluidsynth/Makefile @@ -2,7 +2,7 @@ PORTNAME= fluidsynth DISTVERSIONPREFIX= v -DISTVERSION= 2.2.4 +DISTVERSION= 2.2.6 CATEGORIES= audio MAINTAINER= multimedia@FreeBSD.org diff --git a/audio/fluidsynth/distinfo b/audio/fluidsynth/distinfo index 087be22bb6c..994ad578aac 100644 --- a/audio/fluidsynth/distinfo +++ b/audio/fluidsynth/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637940656 -SHA256 (FluidSynth-fluidsynth-v2.2.4_GH0.tar.gz) = 83cb1dba04c632ede74f0c0717018b062c0e00b639722203b23f77a961afd390 -SIZE (FluidSynth-fluidsynth-v2.2.4_GH0.tar.gz) = 1746638 +TIMESTAMP = 1648069802 +SHA256 (FluidSynth-fluidsynth-v2.2.6_GH0.tar.gz) = ca90fe675cacd9a7b442662783c4e7fa0e1fd638b28d64105a4e3fe0f618d20f +SIZE (FluidSynth-fluidsynth-v2.2.6_GH0.tar.gz) = 1749202 diff --git a/audio/fluidsynth/pkg-plist 
b/audio/fluidsynth/pkg-plist index d7b2f137773..764bc3dbbcd 100644 --- a/audio/fluidsynth/pkg-plist +++ b/audio/fluidsynth/pkg-plist @@ -19,6 +19,6 @@ include/fluidsynth/version.h include/fluidsynth/voice.h lib/libfluidsynth.so lib/libfluidsynth.so.3 -lib/libfluidsynth.so.3.0.4 +lib/libfluidsynth.so.3.0.6 libdata/pkgconfig/fluidsynth.pc man/man1/fluidsynth.1.gz diff --git a/audio/forked-daapd/Makefile b/audio/forked-daapd/Makefile index a61b3263abf..3ce647d8819 100644 --- a/audio/forked-daapd/Makefile +++ b/audio/forked-daapd/Makefile @@ -2,7 +2,7 @@ PORTNAME= forked-daapd DISTVERSION= 27.2 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= audio MASTER_SITES= https://github.com/ejurgensen/forked-daapd/releases/download/${DISTVERSION}/ diff --git a/audio/ganv/Makefile b/audio/ganv/Makefile index d82f3a3c588..865680ac724 100644 --- a/audio/ganv/Makefile +++ b/audio/ganv/Makefile @@ -1,5 +1,6 @@ PORTNAME= ganv PORTVERSION= 1.8.0 +PORTREVISION= 1 CATEGORIES= audio MASTER_SITES= https://download.drobilla.net/ diff --git a/audio/gbemol/Makefile b/audio/gbemol/Makefile index 17515f2fb31..f34d20d46d4 100644 --- a/audio/gbemol/Makefile +++ b/audio/gbemol/Makefile @@ -2,7 +2,7 @@ PORTNAME= gbemol PORTVERSION= 0.3.2 -PORTREVISION= 13 +PORTREVISION= 14 CATEGORIES= audio MASTER_SITES= SF diff --git a/audio/glurp/Makefile b/audio/glurp/Makefile index 678122531ff..214a3872d5b 100644 --- a/audio/glurp/Makefile +++ b/audio/glurp/Makefile @@ -2,7 +2,7 @@ PORTNAME= glurp PORTVERSION= 0.12.3 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= audio MASTER_SITES= SF diff --git a/audio/gmpc-discogs/Makefile b/audio/gmpc-discogs/Makefile index c82509f43e6..8f3da966a74 100644 --- a/audio/gmpc-discogs/Makefile +++ b/audio/gmpc-discogs/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-discogs PORTVERSION= 0.20.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc-extraplaylist/Makefile 
b/audio/gmpc-extraplaylist/Makefile index df5d1746e6d..f040db03776 100644 --- a/audio/gmpc-extraplaylist/Makefile +++ b/audio/gmpc-extraplaylist/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-extraplaylist PORTVERSION= 0.20.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc-lastfm/Makefile b/audio/gmpc-lastfm/Makefile index 5a28fc94699..ad7e70d1cf2 100644 --- a/audio/gmpc-lastfm/Makefile +++ b/audio/gmpc-lastfm/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-lastfm PORTVERSION= 0.20.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ DISTNAME= gmpc-last-fm-${PORTVERSION} diff --git a/audio/gmpc-lyrics/Makefile b/audio/gmpc-lyrics/Makefile index 743266863c8..9e97fc9872e 100644 --- a/audio/gmpc-lyrics/Makefile +++ b/audio/gmpc-lyrics/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-lyrics PORTVERSION= 11.8.16 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc-lyricsplugin/Makefile b/audio/gmpc-lyricsplugin/Makefile index 9f9bebf7909..389c6601383 100644 --- a/audio/gmpc-lyricsplugin/Makefile +++ b/audio/gmpc-lyricsplugin/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-lyricsplugin PORTVERSION= 0.20.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc-magnatune/Makefile b/audio/gmpc-magnatune/Makefile index 6bcd8b6969c..a2612986cf6 100644 --- a/audio/gmpc-magnatune/Makefile +++ b/audio/gmpc-magnatune/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-magnatune PORTVERSION= 11.8.16 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc-mdcover/Makefile b/audio/gmpc-mdcover/Makefile index 6ba08e1f08c..a6de47a29b6 100644 --- a/audio/gmpc-mdcover/Makefile +++ 
b/audio/gmpc-mdcover/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-mdcover PORTVERSION= 0.20.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc-mserver/Makefile b/audio/gmpc-mserver/Makefile index 5ac892da273..468b4270067 100644 --- a/audio/gmpc-mserver/Makefile +++ b/audio/gmpc-mserver/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-mserver PORTVERSION= 0.20.0 -PORTREVISION= 17 +PORTREVISION= 18 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc-shout/Makefile b/audio/gmpc-shout/Makefile index dbe51491a20..38b191d02dd 100644 --- a/audio/gmpc-shout/Makefile +++ b/audio/gmpc-shout/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc-shout PORTVERSION= 0.20.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmpc/Makefile b/audio/gmpc/Makefile index a79176bb19a..0bd3c816389 100644 --- a/audio/gmpc/Makefile +++ b/audio/gmpc/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmpc PORTVERSION= 11.8.16 -PORTREVISION= 5 +PORTREVISION= 6 PORTEPOCH= 1 CATEGORIES= audio MASTER_SITES= http://download.sarine.nl/Programs/gmpc/${PORTVERSION}/ diff --git a/audio/gmtp/Makefile b/audio/gmtp/Makefile index cc624c73b58..927eee483bb 100644 --- a/audio/gmtp/Makefile +++ b/audio/gmtp/Makefile @@ -1,5 +1,6 @@ PORTNAME= gmtp PORTVERSION= 1.3.11 +PORTREVISION= 1 CATEGORIES= audio MASTER_SITES= SF/${PORTNAME}/gMTP-${PORTVERSION}/ diff --git a/audio/gnaural/Makefile b/audio/gnaural/Makefile index 23f3df35af7..d3ac357452f 100644 --- a/audio/gnaural/Makefile +++ b/audio/gnaural/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnaural PORTVERSION= 1.0.20110606 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= audio MASTER_SITES= SF/${PORTNAME}/Gnaural/ \ LOCAL/martymac diff --git a/audio/gsequencer/Makefile b/audio/gsequencer/Makefile index 574a2536c70..b803df29275 100644 --- a/audio/gsequencer/Makefile +++ 
b/audio/gsequencer/Makefile @@ -1,5 +1,6 @@ PORTNAME= gsequencer DISTVERSION= 3.10.4 +PORTREVISION= 1 CATEGORIES= audio MASTER_SITES= SAVANNAH/gsequencer/${DISTVERSION:R}.x diff --git a/audio/gstreamer1-plugins-a52dec/Makefile b/audio/gstreamer1-plugins-a52dec/Makefile index 14a7e308979..3cd5c2b059f 100644 --- a/audio/gstreamer1-plugins-a52dec/Makefile +++ b/audio/gstreamer1-plugins-a52dec/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer ATSC A/52 stream aka AC-3 (dvd audio) plugin diff --git a/audio/gstreamer1-plugins-amrnb/Makefile b/audio/gstreamer1-plugins-amrnb/Makefile index 014e9a77ce1..0e6e563772e 100644 --- a/audio/gstreamer1-plugins-amrnb/Makefile +++ b/audio/gstreamer1-plugins-amrnb/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer OpenCore based Adaptive Multi-Rate Narrow-Band plugin diff --git a/audio/gstreamer1-plugins-amrwbdec/Makefile b/audio/gstreamer1-plugins-amrwbdec/Makefile index fb124678c94..f1a38d978a4 100644 --- a/audio/gstreamer1-plugins-amrwbdec/Makefile +++ b/audio/gstreamer1-plugins-amrwbdec/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio PKGNAMESUFFIX= 1-plugins-amrwbdec diff --git a/audio/gstreamer1-plugins-bs2b/Makefile b/audio/gstreamer1-plugins-bs2b/Makefile index 96be3543f4d..0955846bd71 100644 --- a/audio/gstreamer1-plugins-bs2b/Makefile +++ b/audio/gstreamer1-plugins-bs2b/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer Bauer Stereophonic to Banaural BSP (bs2b) plugin diff --git a/audio/gstreamer1-plugins-cdparanoia/Makefile b/audio/gstreamer1-plugins-cdparanoia/Makefile index d5a4744cb57..10b4f86b1f5 100644 --- a/audio/gstreamer1-plugins-cdparanoia/Makefile +++ b/audio/gstreamer1-plugins-cdparanoia/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer CDDA extraction (aka audio ripping) plugin diff --git 
a/audio/gstreamer1-plugins-chromaprint/Makefile b/audio/gstreamer1-plugins-chromaprint/Makefile index b498be76ec2..4380d8068bc 100644 --- a/audio/gstreamer1-plugins-chromaprint/Makefile +++ b/audio/gstreamer1-plugins-chromaprint/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer audio fingerprint extracting plugin diff --git a/audio/gstreamer1-plugins-faac/Makefile b/audio/gstreamer1-plugins-faac/Makefile index 7d500dbae1d..be812f24822 100644 --- a/audio/gstreamer1-plugins-faac/Makefile +++ b/audio/gstreamer1-plugins-faac/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer MPEG-2 and MPEG-4 AAC encoder plugin diff --git a/audio/gstreamer1-plugins-faad/Makefile b/audio/gstreamer1-plugins-faad/Makefile index b7708ed22ab..3e7c39c0265 100644 --- a/audio/gstreamer1-plugins-faad/Makefile +++ b/audio/gstreamer1-plugins-faad/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer MPEG-2 and MPEG-4 AAC decoder plugin diff --git a/audio/gstreamer1-plugins-flac/Makefile b/audio/gstreamer1-plugins-flac/Makefile index 58008282a34..c42da0c173f 100644 --- a/audio/gstreamer1-plugins-flac/Makefile +++ b/audio/gstreamer1-plugins-flac/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer free lossless audio encoder/decoder plugin diff --git a/audio/gstreamer1-plugins-flite/Makefile b/audio/gstreamer1-plugins-flite/Makefile index 9ec3ca09f67..e466c653c13 100644 --- a/audio/gstreamer1-plugins-flite/Makefile +++ b/audio/gstreamer1-plugins-flite/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer flite run-time speech synthesis engine plugin diff --git a/audio/gstreamer1-plugins-gme/Makefile b/audio/gstreamer1-plugins-gme/Makefile index 320aad4f97d..c43bbc2404d 100644 --- a/audio/gstreamer1-plugins-gme/Makefile +++ b/audio/gstreamer1-plugins-gme/Makefile @@ -1,4 +1,4 @@ 
-PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer emulate gaming console sound processors plugin diff --git a/audio/gstreamer1-plugins-gsm/Makefile b/audio/gstreamer1-plugins-gsm/Makefile index 149f2c3d950..d5d50b797e2 100644 --- a/audio/gstreamer1-plugins-gsm/Makefile +++ b/audio/gstreamer1-plugins-gsm/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer gsm encoding/decoding plugin diff --git a/audio/gstreamer1-plugins-jack/Makefile b/audio/gstreamer1-plugins-jack/Makefile index 9ce5a82b1e5..42ec35509cd 100644 --- a/audio/gstreamer1-plugins-jack/Makefile +++ b/audio/gstreamer1-plugins-jack/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer low-latency audio server plugin diff --git a/audio/gstreamer1-plugins-ladspa/Makefile b/audio/gstreamer1-plugins-ladspa/Makefile index 1b716a4c620..4e2a1acae67 100644 --- a/audio/gstreamer1-plugins-ladspa/Makefile +++ b/audio/gstreamer1-plugins-ladspa/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= Gstreamer ladspa (Linux Audio Developer's Simple Plugin API) plugin diff --git a/audio/gstreamer1-plugins-lame/Makefile b/audio/gstreamer1-plugins-lame/Makefile index ae97201e927..684d232779f 100644 --- a/audio/gstreamer1-plugins-lame/Makefile +++ b/audio/gstreamer1-plugins-lame/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer High-quality free mp3 encode plugin diff --git a/audio/gstreamer1-plugins-lv2/Makefile b/audio/gstreamer1-plugins-lv2/Makefile index 4af8beff452..ed0eeff8282 100644 --- a/audio/gstreamer1-plugins-lv2/Makefile +++ b/audio/gstreamer1-plugins-lv2/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= Gstreamer lv2 wrapper plugin diff --git a/audio/gstreamer1-plugins-modplug/Makefile b/audio/gstreamer1-plugins-modplug/Makefile index aa2d4e5fecd..7b52b3522ee 100644 --- 
a/audio/gstreamer1-plugins-modplug/Makefile +++ b/audio/gstreamer1-plugins-modplug/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer module decoder based on modplug egine plugin diff --git a/audio/gstreamer1-plugins-mpg123/Makefile b/audio/gstreamer1-plugins-mpg123/Makefile index 7a75cfb87ed..6acc59dac85 100644 --- a/audio/gstreamer1-plugins-mpg123/Makefile +++ b/audio/gstreamer1-plugins-mpg123/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer MPEG Layer 1, 2, and 3 plugin diff --git a/audio/gstreamer1-plugins-musepack/Makefile b/audio/gstreamer1-plugins-musepack/Makefile index 895c4861f6e..083b9c20f8e 100644 --- a/audio/gstreamer1-plugins-musepack/Makefile +++ b/audio/gstreamer1-plugins-musepack/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= Gstreamer musepack mpc encoder/decoder plugin diff --git a/audio/gstreamer1-plugins-ogg/Makefile b/audio/gstreamer1-plugins-ogg/Makefile index 7dec02f16d1..000181d6b5e 100644 --- a/audio/gstreamer1-plugins-ogg/Makefile +++ b/audio/gstreamer1-plugins-ogg/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer Ogg bitstream plugin diff --git a/audio/gstreamer1-plugins-openmpt/Makefile b/audio/gstreamer1-plugins-openmpt/Makefile index 4764f62dbd5..2a486d257db 100644 --- a/audio/gstreamer1-plugins-openmpt/Makefile +++ b/audio/gstreamer1-plugins-openmpt/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio COMMENT= GStreamer OpenMPT audio decoder plugin diff --git a/audio/gstreamer1-plugins-opus/Makefile b/audio/gstreamer1-plugins-opus/Makefile index 59c7edf8aee..e2621268233 100644 --- a/audio/gstreamer1-plugins-opus/Makefile +++ b/audio/gstreamer1-plugins-opus/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer Opus audio encoder/decoder plugin diff --git a/audio/gstreamer1-plugins-pulse/Makefile 
b/audio/gstreamer1-plugins-pulse/Makefile index dc568c9402e..35497a2fd29 100644 --- a/audio/gstreamer1-plugins-pulse/Makefile +++ b/audio/gstreamer1-plugins-pulse/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer pulseaudio plugin diff --git a/audio/gstreamer1-plugins-shout2/Makefile b/audio/gstreamer1-plugins-shout2/Makefile index 47a1e373119..68bf12e94ea 100644 --- a/audio/gstreamer1-plugins-shout2/Makefile +++ b/audio/gstreamer1-plugins-shout2/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer icecast output plugin diff --git a/audio/gstreamer1-plugins-sidplay/Makefile b/audio/gstreamer1-plugins-sidplay/Makefile index 2e34327ed74..2d92ba2ad74 100644 --- a/audio/gstreamer1-plugins-sidplay/Makefile +++ b/audio/gstreamer1-plugins-sidplay/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer Commodore SID audio decoder plugin diff --git a/audio/gstreamer1-plugins-sndfile/Makefile b/audio/gstreamer1-plugins-sndfile/Makefile index ab0b44fed02..f06b1f1e1c0 100644 --- a/audio/gstreamer1-plugins-sndfile/Makefile +++ b/audio/gstreamer1-plugins-sndfile/Makefile @@ -1,6 +1,6 @@ # Created by: Michael Johnson -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio COMMENT= Gstreamer sndfile plugin diff --git a/audio/gstreamer1-plugins-soundtouch/Makefile b/audio/gstreamer1-plugins-soundtouch/Makefile index 803ef6ddb95..b8d9c7f7bed 100644 --- a/audio/gstreamer1-plugins-soundtouch/Makefile +++ b/audio/gstreamer1-plugins-soundtouch/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio COMMENT= GStreamer soundtouch plugin diff --git a/audio/gstreamer1-plugins-speex/Makefile b/audio/gstreamer1-plugins-speex/Makefile index e90385b8334..d1029b4cb05 100644 --- a/audio/gstreamer1-plugins-speex/Makefile +++ b/audio/gstreamer1-plugins-speex/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer 
speex voice encode/decode plugin diff --git a/audio/gstreamer1-plugins-taglib/Makefile b/audio/gstreamer1-plugins-taglib/Makefile index 2e23a9a6418..79ab2c14c17 100644 --- a/audio/gstreamer1-plugins-taglib/Makefile +++ b/audio/gstreamer1-plugins-taglib/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer taglib plugin for adding APEv2 and ID3v2 headers diff --git a/audio/gstreamer1-plugins-twolame/Makefile b/audio/gstreamer1-plugins-twolame/Makefile index 43efa2f6c2f..b489aa7d813 100644 --- a/audio/gstreamer1-plugins-twolame/Makefile +++ b/audio/gstreamer1-plugins-twolame/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer High-quality free MP2 encoder plugin diff --git a/audio/gstreamer1-plugins-vorbis/Makefile b/audio/gstreamer1-plugins-vorbis/Makefile index 0d94e3c0316..77e85708ae4 100644 --- a/audio/gstreamer1-plugins-vorbis/Makefile +++ b/audio/gstreamer1-plugins-vorbis/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer vorbis encoder/decoder plugin diff --git a/audio/gstreamer1-plugins-wavpack/Makefile b/audio/gstreamer1-plugins-wavpack/Makefile index e989368c4ae..3e176e6a587 100644 --- a/audio/gstreamer1-plugins-wavpack/Makefile +++ b/audio/gstreamer1-plugins-wavpack/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= audio COMMENT= GStreamer wavpack encode/decode plugin diff --git a/audio/gstreamer1-plugins-webrtcdsp/Makefile b/audio/gstreamer1-plugins-webrtcdsp/Makefile index 10fb7d6b05e..559a2732920 100644 --- a/audio/gstreamer1-plugins-webrtcdsp/Makefile +++ b/audio/gstreamer1-plugins-webrtcdsp/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio COMMENT= GStreamer WebRTC Audio Processing plugin diff --git a/audio/gtkguitune/Makefile b/audio/gtkguitune/Makefile index 39f94209847..65dc193f8c9 100644 --- a/audio/gtkguitune/Makefile +++ b/audio/gtkguitune/Makefile @@ -3,7 +3,7 @@ 
PORTNAME= gtkguitune PORTVERSION= 0.8 DISTVERSIONPREFIX= gtk2- -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= audio MASTER_SITES= GENTOO \ http://www.oocities.org/harpin_floh/mysoft/ diff --git a/audio/gtkpod/Makefile b/audio/gtkpod/Makefile index 711f5f65c0b..0e5fb23e21e 100644 --- a/audio/gtkpod/Makefile +++ b/audio/gtkpod/Makefile @@ -2,7 +2,7 @@ PORTNAME= gtkpod PORTVERSION= 1.0.0 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= audio MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTNAME}-${PORTVERSION} diff --git a/audio/guitarix-lv2/Makefile b/audio/guitarix-lv2/Makefile index a23de916e61..5760c11a519 100644 --- a/audio/guitarix-lv2/Makefile +++ b/audio/guitarix-lv2/Makefile @@ -1,5 +1,6 @@ PORTNAME= guitarix DISTVERSION= 0.43.1 +PORTREVISION= 1 CATEGORIES= audio MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/ PKGNAMESUFFIX= -lv2 diff --git a/audio/icecast-kh/Makefile b/audio/icecast-kh/Makefile index 0e30c39aaee..022ab5bada4 100644 --- a/audio/icecast-kh/Makefile +++ b/audio/icecast-kh/Makefile @@ -3,6 +3,7 @@ PORTNAME= icecast DISTVERSIONPREFIX= icecast- DISTVERSION= 2.4.0-kh15 +PORTREVISION= 1 CATEGORIES= audio net PKGNAMESUFFIX= -kh diff --git a/audio/icecast/Makefile b/audio/icecast/Makefile index 9cb6f3d6e60..0f5d180c3fa 100644 --- a/audio/icecast/Makefile +++ b/audio/icecast/Makefile @@ -2,6 +2,7 @@ PORTNAME= icecast PORTVERSION= 2.4.4 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= audio net MASTER_SITES= https://downloads.xiph.org/releases/icecast/:source \ @@ -25,7 +26,7 @@ LIB_DEPENDS= libcurl.so:ftp/curl \ libtheora.so:multimedia/libtheora \ libvorbis.so:audio/libvorbis -USES= cpe gmake gnome localbase ssl +USES= cpe gmake gnome localbase pkgconfig ssl USE_GNOME= libxml2 libxslt GNU_CONFIGURE= yes diff --git a/audio/icecast/files/patch-configure b/audio/icecast/files/patch-configure new file mode 100644 index 00000000000..9427d3671c3 --- /dev/null +++ b/audio/icecast/files/patch-configure @@ -0,0 +1,410 @@ +--- configure.orig 2018-10-31 18:26:07 UTC ++++ 
configure +@@ -673,7 +673,13 @@ OGG_LDFLAGS + OGG_CFLAGS + OGG_LIBS + OGG_PREFIX +-XSLTCONFIG ++LIBXML2_LIBS ++LIBXML2_CFLAGS ++LIBXSLT_LIBS ++LIBXSLT_CFLAGS ++PKG_CONFIG_LIBDIR ++PKG_CONFIG_PATH ++PKG_CONFIG + CPP + LT_SYS_LIBRARY_PATH + OTOOL64 +@@ -806,7 +812,6 @@ with_gnu_ld + with_sysroot + enable_libtool_lock + enable_largefile +-with_xslt_config + with_ogg + with_vorbis + with_theora +@@ -826,7 +831,13 @@ LIBS + CPPFLAGS + LT_SYS_LIBRARY_PATH + CPP +-XSLTCONFIG ++PKG_CONFIG ++PKG_CONFIG_PATH ++PKG_CONFIG_LIBDIR ++LIBXSLT_CFLAGS ++LIBXSLT_LIBS ++LIBXML2_CFLAGS ++LIBXML2_LIBS + OGG_PREFIX + VORBIS_PREFIX + THEORA +@@ -1488,7 +1499,6 @@ Optional Packages: + --with-gnu-ld assume the C compiler uses GNU ld [default=no] + --with-sysroot[=DIR] Search for dependent libraries within DIR (or the + compiler's sysroot if not specified). +- --with-xslt-config=PATH use xslt-config in PATH to find libxslt + --with-ogg=PREFIX Prefix where libogg is installed (optional) + --with-vorbis=PREFIX Prefix where libvorbis is installed (optional) + --with-theora=PREFIX Prefix where libtheora is installed (optional) +@@ -1509,7 +1519,19 @@ Some influential environment variables: + LT_SYS_LIBRARY_PATH + User-defined run-time library search path. + CPP C preprocessor +- XSLTCONFIG XSLT configuration program ++ PKG_CONFIG path to pkg-config utility ++ PKG_CONFIG_PATH ++ directories to add to pkg-config's search path ++ PKG_CONFIG_LIBDIR ++ path overriding pkg-config's built-in search path ++ LIBXSLT_CFLAGS ++ C compiler flags for LIBXSLT, overriding pkg-config ++ LIBXSLT_LIBS ++ linker flags for LIBXSLT, overriding pkg-config ++ LIBXML2_CFLAGS ++ C compiler flags for LIBXML2, overriding pkg-config ++ LIBXML2_LIBS ++ linker flags for LIBXML2, overriding pkg-config + OGG_PREFIX path to ogg installation + VORBIS_PREFIX + path to vorbis installation +@@ -13031,22 +13053,66 @@ fi + + + +-# Check whether --with-xslt-config was given. 
+-if test "${with_xslt_config+set}" = set; then : +- withval=$with_xslt_config; XSLTCONFIG="$withval" ++ ++ ++ ++ ++ ++if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. ++set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if ${ac_cv_path_PKG_CONFIG+:} false; then : ++ $as_echo_n "(cached) " >&6 + else +- for ac_prog in xslt-config ++ case $PKG_CONFIG in ++ [\\/]* | ?:[\\/]*) ++ ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. ++ ;; ++ *) ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH + do +- # Extract the first word of "$ac_prog", so it can be a program name with args. +-set dummy $ac_prog; ac_word=$2 ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ++ ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++ ;; ++esac ++fi ++PKG_CONFIG=$ac_cv_path_PKG_CONFIG ++if test -n "$PKG_CONFIG"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 ++$as_echo "$PKG_CONFIG" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_path_PKG_CONFIG"; then ++ ac_pt_PKG_CONFIG=$PKG_CONFIG ++ # Extract the first word of "pkg-config", so it can be a program name with args. ++set dummy pkg-config; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... 
" >&6; } +-if ${ac_cv_path_XSLTCONFIG+:} false; then : ++if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : + $as_echo_n "(cached) " >&6 + else +- case $XSLTCONFIG in ++ case $ac_pt_PKG_CONFIG in + [\\/]* | ?:[\\/]*) +- ac_cv_path_XSLTCONFIG="$XSLTCONFIG" # Let the user override the test with a path. ++ ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +@@ -13056,7 +13122,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then +- ac_cv_path_XSLTCONFIG="$as_dir/$ac_word$ac_exec_ext" ++ ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -13067,35 +13133,198 @@ IFS=$as_save_IFS + ;; + esac + fi +-XSLTCONFIG=$ac_cv_path_XSLTCONFIG +-if test -n "$XSLTCONFIG"; then +- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $XSLTCONFIG" >&5 +-$as_echo "$XSLTCONFIG" >&6; } ++ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG ++if test -n "$ac_pt_PKG_CONFIG"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 ++$as_echo "$ac_pt_PKG_CONFIG" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 + $as_echo "no" >&6; } + fi + ++ if test "x$ac_pt_PKG_CONFIG" = x; then ++ PKG_CONFIG="" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ PKG_CONFIG=$ac_pt_PKG_CONFIG ++ fi ++else ++ PKG_CONFIG="$ac_cv_path_PKG_CONFIG" ++fi + +- test -n "$XSLTCONFIG" && break +-done +-test -n "$XSLTCONFIG" || XSLTCONFIG="""" ++fi ++if test -n "$PKG_CONFIG"; then ++ _pkg_min_version=0.9.0 ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking 
pkg-config is at least version $_pkg_min_version" >&5 ++$as_echo_n "checking pkg-config is at least version $_pkg_min_version... " >&6; } ++ if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++ else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ PKG_CONFIG="" ++ fi ++fi + ++pkg_failed=no ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBXSLT" >&5 ++$as_echo_n "checking for LIBXSLT... " >&6; } + ++if test -n "$LIBXSLT_CFLAGS"; then ++ pkg_cv_LIBXSLT_CFLAGS="$LIBXSLT_CFLAGS" ++ elif test -n "$PKG_CONFIG"; then ++ if test -n "$PKG_CONFIG" && \ ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libxslt\""; } >&5 ++ ($PKG_CONFIG --exists --print-errors "libxslt") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ pkg_cv_LIBXSLT_CFLAGS=`$PKG_CONFIG --cflags "libxslt" 2>/dev/null` ++ test "x$?" != "x0" && pkg_failed=yes ++else ++ pkg_failed=yes + fi ++ else ++ pkg_failed=untried ++fi ++if test -n "$LIBXSLT_LIBS"; then ++ pkg_cv_LIBXSLT_LIBS="$LIBXSLT_LIBS" ++ elif test -n "$PKG_CONFIG"; then ++ if test -n "$PKG_CONFIG" && \ ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libxslt\""; } >&5 ++ ($PKG_CONFIG --exists --print-errors "libxslt") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ pkg_cv_LIBXSLT_LIBS=`$PKG_CONFIG --libs "libxslt" 2>/dev/null` ++ test "x$?" != "x0" && pkg_failed=yes ++else ++ pkg_failed=yes ++fi ++ else ++ pkg_failed=untried ++fi + +-if test "x$XSLTCONFIG" = "x"; then +- as_fn_error $? 
"XSLT configuration could not be found" "$LINENO" 5 ++ ++ ++if test $pkg_failed = yes; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ ++if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then ++ _pkg_short_errors_supported=yes ++else ++ _pkg_short_errors_supported=no + fi +-if ! test -x "$XSLTCONFIG"; then +- as_fn_error $? "$XSLTCONFIG cannot be executed" "$LINENO" 5 ++ if test $_pkg_short_errors_supported = yes; then ++ LIBXSLT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libxslt" 2>&1` ++ else ++ LIBXSLT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libxslt" 2>&1` ++ fi ++ # Put the nasty error message in config.log where it belongs ++ echo "$LIBXSLT_PKG_ERRORS" >&5 ++ ++ ++ as_fn_error $? "${LIBXSLT_PKG_ERRORS}. libxslt is required." "$LINENO" 5 ++ ++elif test $pkg_failed = untried; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ ++ as_fn_error $? "${LIBXSLT_PKG_ERRORS}. libxslt is required." "$LINENO" 5 ++ ++else ++ LIBXSLT_CFLAGS=$pkg_cv_LIBXSLT_CFLAGS ++ LIBXSLT_LIBS=$pkg_cv_LIBXSLT_LIBS ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++ + fi +-XSLT_LIBS="$($XSLTCONFIG --libs)" +-XSLT_CFLAGS="$($XSLTCONFIG --cflags)" +-ac_xslt_save_LIBS="$LIBS" +-ac_xslt_save_CFLAGS="$CFLAGS" +-LIBS="$XSLT_LIBS $LIBS" +-CFLAGS="$CFLAGS $XSLT_CFLAGS" ++ ++CFLAGS="${CFLAGS} ${LIBXSLT_CFLAGS}" ++LIBS="${LIBS} ${LIBXSLT_LIBS}" ++ ++ ++pkg_failed=no ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBXML2" >&5 ++$as_echo_n "checking for LIBXML2... " >&6; } ++ ++if test -n "$LIBXML2_CFLAGS"; then ++ pkg_cv_LIBXML2_CFLAGS="$LIBXML2_CFLAGS" ++ elif test -n "$PKG_CONFIG"; then ++ if test -n "$PKG_CONFIG" && \ ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libxml-2.0\""; } >&5 ++ ($PKG_CONFIG --exists --print-errors "libxml-2.0") 2>&5 ++ ac_status=$? 
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ pkg_cv_LIBXML2_CFLAGS=`$PKG_CONFIG --cflags "libxml-2.0" 2>/dev/null` ++ test "x$?" != "x0" && pkg_failed=yes ++else ++ pkg_failed=yes ++fi ++ else ++ pkg_failed=untried ++fi ++if test -n "$LIBXML2_LIBS"; then ++ pkg_cv_LIBXML2_LIBS="$LIBXML2_LIBS" ++ elif test -n "$PKG_CONFIG"; then ++ if test -n "$PKG_CONFIG" && \ ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libxml-2.0\""; } >&5 ++ ($PKG_CONFIG --exists --print-errors "libxml-2.0") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ pkg_cv_LIBXML2_LIBS=`$PKG_CONFIG --libs "libxml-2.0" 2>/dev/null` ++ test "x$?" != "x0" && pkg_failed=yes ++else ++ pkg_failed=yes ++fi ++ else ++ pkg_failed=untried ++fi ++ ++ ++ ++if test $pkg_failed = yes; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ ++if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then ++ _pkg_short_errors_supported=yes ++else ++ _pkg_short_errors_supported=no ++fi ++ if test $_pkg_short_errors_supported = yes; then ++ LIBXML2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libxml-2.0" 2>&1` ++ else ++ LIBXML2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libxml-2.0" 2>&1` ++ fi ++ # Put the nasty error message in config.log where it belongs ++ echo "$LIBXML2_PKG_ERRORS" >&5 ++ ++ ++ as_fn_error $? "${LIBXML2_PKG_ERRORS}. libxml2 is required." "$LINENO" 5 ++ ++elif test $pkg_failed = untried; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ ++ as_fn_error $? "${LIBXML2_PKG_ERRORS}. libxml2 is required." 
"$LINENO" 5 ++ ++else ++ LIBXML2_CFLAGS=$pkg_cv_LIBXML2_CFLAGS ++ LIBXML2_LIBS=$pkg_cv_LIBXML2_LIBS ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++ ++fi + for ac_func in xsltSaveResultToString + do : + ac_fn_c_check_func "$LINENO" "xsltSaveResultToString" "ac_cv_func_xsltSaveResultToString" +@@ -13107,37 +13336,9 @@ _ACEOF + fi + done + +-CFLAGS="$ac_xslt_save_CFLAGS" +-LIBS="$ac_xslt_save_LIBS" + +- +-for arg in $XSLT_CFLAGS +-do +- if { cat < /dev/null +- then +- XIPH_CPPFLAGS="$XIPH_CPPFLAGS $arg" +- fi +-done +- +- +-xt_compare="$XIPH_LIBS" +-xt_filtered="" +-for arg in $XSLT_LIBS +-do +- if { cat < /dev/null +- then +- xt_compare="$arg $xt_compare" +- xt_filtered="$xt_filtered $arg" +- fi +-done +-XIPH_LIBS="$xt_filtered $XIPH_LIBS" +- ++CFLAGS="${CFLAGS} ${LIBXML2_CFLAGS}" ++LIBS="${LIBS} ${LIBXML2_LIBS}" + + + diff --git a/audio/ices/Makefile b/audio/ices/Makefile index 3daa5635492..86dfbb6f2d7 100644 --- a/audio/ices/Makefile +++ b/audio/ices/Makefile @@ -2,6 +2,7 @@ PORTNAME= ices PORTVERSION= 2.0.3 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= audio net MASTER_SITES= https://downloads.xiph.org/releases/ices/ \ diff --git a/audio/jalv-select/Makefile b/audio/jalv-select/Makefile index f34da431b5f..feec36ad026 100644 --- a/audio/jalv-select/Makefile +++ b/audio/jalv-select/Makefile @@ -1,6 +1,7 @@ PORTNAME= jalv-select DISTVERSIONPREFIX= v DISTVERSION= 1.3 +PORTREVISION= 1 CATEGORIES= audio MAINTAINER= yuri@FreeBSD.org diff --git a/audio/jalv/Makefile b/audio/jalv/Makefile index b8bd34a6909..0dbd5f7bb35 100644 --- a/audio/jalv/Makefile +++ b/audio/jalv/Makefile @@ -2,7 +2,7 @@ PORTNAME= jalv DISTVERSION= 1.6.6 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio MASTER_SITES= http://download.drobilla.net/ diff --git a/audio/kid3-qt5/Makefile b/audio/kid3-qt5/Makefile index 40cf8f8875d..cb3a9b8c53a 100644 --- a/audio/kid3-qt5/Makefile +++ b/audio/kid3-qt5/Makefile @@ -1,6 +1,6 @@ PORTNAME= kid3 PORTVERSION= 3.9.1 
-PORTREVISION?= 0 +PORTREVISION?= 1 CATEGORIES= audio kde MASTER_SITES= KDE/stable/${PORTNAME}/${PORTVERSION} PKGNAMESUFFIX= -${SLAVE} diff --git a/audio/lame/Makefile b/audio/lame/Makefile index 3c49c401b28..28e5ab88ca6 100644 --- a/audio/lame/Makefile +++ b/audio/lame/Makefile @@ -2,7 +2,7 @@ PORTNAME= lame PORTVERSION= 3.100 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= audio MASTER_SITES= SF/${PORTNAME:tl}/${PORTNAME:tl}/3.100 @@ -20,7 +20,12 @@ CPE_VENDOR= lame_project DOC_FILES= API LICENSE README TODO USAGE -OPTIONS_DEFINE= DOCS +OPTIONS_DEFINE= DOCS SNDFILE +OPTIONS_DEFAULT=DOCS + +SNDFILE_CONFIGURE_ON= --with-fileio=sndfile +SNDFILE_CONFIGURE_OFF= --with-fileio=lame +SNDFILE_LIB_DEPENDS= libsndfile.so:audio/libsndfile .include diff --git a/audio/lash/Makefile b/audio/lash/Makefile index 0ddc2f5324e..50412a8bb4e 100644 --- a/audio/lash/Makefile +++ b/audio/lash/Makefile @@ -2,7 +2,7 @@ PORTNAME= lash PORTVERSION= 0.5.4 -PORTREVISION= 15 +PORTREVISION= 16 CATEGORIES= audio MASTER_SITES= SAVANNAH diff --git a/audio/libgpod-sharp/Makefile b/audio/libgpod-sharp/Makefile index 2c16045944a..924e349a505 100644 --- a/audio/libgpod-sharp/Makefile +++ b/audio/libgpod-sharp/Makefile @@ -1,5 +1,6 @@ # Created by: David Naylor +PORTREVISION= 1 PKGNAMESUFFIX= -sharp MAINTAINER= mono@FreeBSD.org diff --git a/audio/libgpod/Makefile b/audio/libgpod/Makefile index 23340dd7837..4bfbea34f2e 100644 --- a/audio/libgpod/Makefile +++ b/audio/libgpod/Makefile @@ -2,7 +2,7 @@ PORTNAME= libgpod PORTVERSION= 0.8.3 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= audio MASTER_SITES= SF/gtkpod/${PORTNAME}/${PORTNAME}-${PORTVERSION:R} diff --git a/audio/libmusicbrainz5/Makefile b/audio/libmusicbrainz5/Makefile index 0103a0e79bf..3427d6d85cc 100644 --- a/audio/libmusicbrainz5/Makefile +++ b/audio/libmusicbrainz5/Makefile @@ -1,6 +1,7 @@ PORTNAME= libmusicbrainz DISTVERSIONPREFIX= release- DISTVERSION= 5.1.0-19 +PORTREVISION= 1 DISTVERSIONSUFFIX= -ge29cc9a CATEGORIES= audio PKGNAMESUFFIX= 5 
diff --git a/audio/mate-media/Makefile b/audio/mate-media/Makefile index 88ab6c3a442..fce79e26853 100644 --- a/audio/mate-media/Makefile +++ b/audio/mate-media/Makefile @@ -2,6 +2,7 @@ PORTNAME= mate-media PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= audio mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/audio/muse-sequencer/Makefile b/audio/muse-sequencer/Makefile index d4fc9aaa878..44feeb163fb 100644 --- a/audio/muse-sequencer/Makefile +++ b/audio/muse-sequencer/Makefile @@ -1,6 +1,6 @@ PORTNAME= muse-sequencer DISTVERSION= 4.0.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio MAINTAINER= yuri@FreeBSD.org diff --git a/audio/paprefs/Makefile b/audio/paprefs/Makefile index e32aa9ad21b..e31451edd53 100644 --- a/audio/paprefs/Makefile +++ b/audio/paprefs/Makefile @@ -2,6 +2,7 @@ PORTNAME= paprefs PORTVERSION= 1.2 +PORTREVISION= 1 CATEGORIES= audio MASTER_SITES= http://freedesktop.org/software/pulseaudio/${PORTNAME}/ diff --git a/audio/pavucontrol/Makefile b/audio/pavucontrol/Makefile index f4b61b7e3cd..ba4c52868f3 100644 --- a/audio/pavucontrol/Makefile +++ b/audio/pavucontrol/Makefile @@ -2,6 +2,7 @@ PORTNAME= pavucontrol PORTVERSION= 5.0 +PORTREVISION= 1 CATEGORIES= audio MASTER_SITES= http://freedesktop.org/software/pulseaudio/${PORTNAME}/ diff --git a/audio/plasma5-plasma-pa/Makefile b/audio/plasma5-plasma-pa/Makefile index 9ce13f697c3..de6f501ec99 100644 --- a/audio/plasma5-plasma-pa/Makefile +++ b/audio/plasma5-plasma-pa/Makefile @@ -1,5 +1,6 @@ PORTNAME= plasma-pa DISTVERSION= ${KDE_PLASMA_VERSION} +PORTREVISION= 1 CATEGORIES= audio kde kde-plasma MAINTAINER= kde@FreeBSD.org diff --git a/audio/psindustrializer/Makefile b/audio/psindustrializer/Makefile index b91ec19803b..4b49b8f2b1d 100644 --- a/audio/psindustrializer/Makefile +++ b/audio/psindustrializer/Makefile @@ -1,6 +1,6 @@ PORTNAME= psindustrializer DISTVERSION= 0.2.7 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= audio MASTER_SITES= SF/industrializer/ diff --git 
a/audio/pulseeffects/Makefile b/audio/pulseeffects/Makefile index e46e71d13a9..3be022e5d5d 100644 --- a/audio/pulseeffects/Makefile +++ b/audio/pulseeffects/Makefile @@ -1,7 +1,7 @@ PORTNAME= pulseeffects DISTVERSIONPREFIX= v DISTVERSION= 4.6.8 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= audio MAINTAINER= daniel@shafer.cc diff --git a/audio/py-apetag/files/patch-2to3 b/audio/py-apetag/files/patch-2to3 new file mode 100644 index 00000000000..b487b14f373 --- /dev/null +++ b/audio/py-apetag/files/patch-2to3 @@ -0,0 +1,412 @@ +--- ApeTag.py.orig 2007-11-07 01:15:19 UTC ++++ ApeTag.py +@@ -104,6 +104,7 @@ APEv2 specification is here: + + from os.path import isfile as _isfile + from struct import pack as _pack, unpack as _unpack ++from functools import reduce + + # Variable definitions + +@@ -115,7 +116,7 @@ _tagmustexistcommands = 'update getfields getrawtag'.s + _stringallowedcommands = 'getrawtag getnewrawtag getfields hastag'.split() + _filelikeattrs = 'flush read seek tell truncate write'.split() + _badapeitemkeys = 'id3 tag oggs mp+'.split() +-_badapeitemkeychars = ''.join([chr(x) for x in range(32) + range(128,256)]) ++_badapeitemkeychars = ''.join([chr(x) for x in list(range(32)) + list(range(128,256))]) + _apeitemtypes = 'utf8 binary external reserved'.split() + _apeheaderflags = "\x00\x00\xA0" + _apefooterflags = "\x00\x00\x80" +@@ -160,7 +161,7 @@ del i + + # Classes + +-class TagError(StandardError): ++class TagError(Exception): + '''Raised when there is an error during a tagging operation''' + pass + +@@ -171,16 +172,16 @@ class ApeItem(list): + if key is None: + return + if not self.validkey(key): +- raise TagError, 'Invalid item key for ape tag item: %r' % key ++ raise TagError('Invalid item key for ape tag item: %r' % key) + if type not in _apeitemtypes: +- raise TagError, 'Invalid item type for ape tag item: %r' % type ++ raise TagError('Invalid item type for ape tag item: %r' % type) + self.key = key + self.readonly = bool(readonly) + self.type = type 
+- if isinstance(values, basestring): ++ if isinstance(values, str): + values = [values] + if type == 'utf8' or type == 'external': +- values = [unicode(value) for value in values] ++ values = [str(value) for value in values] + self.extend(values) + + def maketag(self): +@@ -201,26 +202,26 @@ class ApeItem(list): + del self[:] + itemlength = _unpack(" 3: +- raise TagError, 'Corrupt tag, invalid item flags, bits 3-7 ' \ +- 'nonzero at position %i' % curpos ++ raise TagError('Corrupt tag, invalid item flags, bits 3-7 ' \ ++ 'nonzero at position %i' % curpos) + self.type = _apeitemtypes[type] + self.readonly = bool(readonly) + curpos += 8 + keyend = data.find("\x00", curpos) + if keyend < curpos: +- raise TagError, 'Corrupt tag, unterminated item key at position ' \ +- '%i' % curpos ++ raise TagError('Corrupt tag, unterminated item key at position ' \ ++ '%i' % curpos) + itemkey = data[curpos:keyend] + if not self.validkey(itemkey): +- raise TagError, 'Corrupt tag, invalid item key at position ' \ +- '%i: %r' % (curpos, itemkey) ++ raise TagError('Corrupt tag, invalid item key at position ' \ ++ '%i: %r' % (curpos, itemkey)) + self.key = itemkey + curpos = keyend + itemlength + 1 + itemvalue = data[keyend+1:curpos] +@@ -246,30 +247,30 @@ def _ape(fil, action, callback = None, callbackkwargs + + if _apepreamble != data[:12]: + if action in _tagmustexistcommands: +- raise TagError, "Nonexistant or corrupt tag, can't %s" % action ++ raise TagError("Nonexistant or corrupt tag, can't %s" % action) + elif action == "delete": + return 0 + data = '' + tagstart = filesize - len(id3data) + elif _apefooterflags != data[21:24] or \ + (data[20] != '\0' and data[20] != '\1'): +- raise TagError, "Bad tag footer flags" ++ raise TagError("Bad tag footer flags") + else: + # file has a valid APE footer + apesize = _unpack(" _maxapesize: +- raise TagError, 'Existing tag is too large: %i bytes' % apesize ++ raise TagError('Existing tag is too large: %i bytes' % apesize) + if apesize + 
len(id3data) > filesize: +- raise TagError, 'Existing tag says it is larger than the file: ' \ +- '%i bytes' % apesize ++ raise TagError('Existing tag says it is larger than the file: ' \ ++ '%i bytes' % apesize) + fil.seek(-apesize - len(id3data), 2) + tagstart = fil.tell() + data = fil.read(apesize) + if _apepreamble != data[:12] or _apeheaderflags != data[21:24] or \ + (data[20] != '\0' and data[20] != '\1'): +- raise TagError, 'Nonexistent or corrupt tag, missing tag header' ++ raise TagError('Nonexistent or corrupt tag, missing tag header') + if apesize != _unpack(" _maxapesize: +- raise TagError, 'New tag is too large: %i bytes' % len(data) ++ raise TagError('New tag is too large: %i bytes' % len(data)) + + if updateid3: + if action == 'replace': + id3data = '' + elif action != 'create' and not id3data: +- raise TagError, "Nonexistant or corrupt tag, can't %s" % action ++ raise TagError("Nonexistant or corrupt tag, can't %s" % action) + if callable(updateid3): + id3data = _id3(id3data, "getnewrawtag", updateid3, callbackkwargs) + else: +@@ -330,7 +331,7 @@ def _ape(fil, action, callback = None, callbackkwargs + def _apefieldstoid3fields(fields): + '''Convert APE tag fields to ID3 tag fields ''' + id3fields = {} +- for key, value in fields.iteritems(): ++ for key, value in fields.items(): + key = key.lower() + if isinstance(value, (list, tuple)): + if not value: +@@ -347,7 +348,7 @@ def _apefieldstoid3fields(fields): + else: + id3fields['track'] = 0 + elif key == 'genre': +- if isinstance(value, basestring) and value.lower() in _id3genresdict: ++ if isinstance(value, str) and value.lower() in _id3genresdict: + id3fields[key] = value + else: + id3fields[key] = '' +@@ -357,7 +358,7 @@ def _apefieldstoid3fields(fields): + except ValueError: + pass + elif key in _id3fields: +- if isinstance(value, unicode): ++ if isinstance(value, str): + value = value.encode('utf8') + id3fields[key] = value + return id3fields +@@ -367,28 +368,28 @@ _apelengthreduce = lambda i1, 
i2: i1 + len(i2) + def _checkargs(fil, action): + '''Check that arguments are valid, convert them, or raise an error''' + if not (isinstance(action,str) and action.lower() in _commands): +- raise TagError, "%r is not a valid action" % action ++ raise TagError("%r is not a valid action" % action) + action = action.lower() + fil = _getfileobj(fil, action) + for attr in _filelikeattrs: + if not hasattr(fil, attr) or not callable(getattr(fil, attr)): +- raise TagError, "fil does not support method %r" % attr ++ raise TagError("fil does not support method %r" % attr) + return fil, action + + def _checkfields(fields): + '''Check that the fields quacks like a dict''' + if not hasattr(fields, 'items') or not callable(fields.items): +- raise TagError, "fields does not support method 'items'" ++ raise TagError("fields does not support method 'items'") + + def _checkremovefields(removefields): + '''Check that removefields is iterable''' + if not hasattr(removefields, '__iter__') \ + or not callable(removefields.__iter__): +- raise TagError, "removefields is not an iterable" ++ raise TagError("removefields is not an iterable") + + def _getfileobj(fil, action): + '''Return a file object if given a filename, otherwise return file''' +- if isinstance(fil, basestring) and _isfile(fil): ++ if isinstance(fil, str) and _isfile(fil): + if action in _stringallowedcommands: + mode = 'rb' + else: +@@ -423,7 +424,7 @@ def _id3(fil, action, callback = None, callbackkwargs= + '''Get or Modify ID3 tag for file''' + if isinstance(fil, str): + if action not in _stringallowedcommands: +- raise TagError, "String not allowed for %s action" % action ++ raise TagError("String not allowed for %s action" % action) + data = fil + else: + fil.seek(0, 2) +@@ -438,7 +439,7 @@ def _id3(fil, action, callback = None, callbackkwargs= + if action == "delete": + return 0 + if action in _tagmustexistcommands: +- raise TagError, "Nonexistant or corrupt tag, can't %s" % action ++ raise TagError("Nonexistant or 
corrupt tag, can't %s" % action) + data = '' + else: + tagstart -= 128 +@@ -473,7 +474,7 @@ def _id3(fil, action, callback = None, callbackkwargs= + + def _makeapev2tag(apeitems): + '''Construct an APE tag string from a dict of ApeItems''' +- apeentries = [item.maketag() for item in apeitems.itervalues()] ++ apeentries = [item.maketag() for item in apeitems.values()] + apeentries.sort(_sortapeitems) + apesize = _pack(" (len(data) - 32)/11: +- raise TagError, 'Corrupt tag, specifies more items that is possible ' \ +- 'given space remaining: %i items' % numitems ++ raise TagError('Corrupt tag, specifies more items that is possible ' \ ++ 'given space remaining: %i items' % numitems) + curpos = 32 + tagitemend = len(data) - 32 + for x in range(numitems): + if curpos >= tagitemend: +- raise TagError, 'Corrupt tag, end of tag reached with more items' \ +- 'specified' ++ raise TagError('Corrupt tag, end of tag reached with more items' \ ++ 'specified') + item = ApeItem() + curpos = item.parsetag(data, curpos) + itemkey = item.key.lower() + if itemkey in apeitems: +- raise TagError, 'Corrupt tag, duplicate item key: %r' % itemkey ++ raise TagError('Corrupt tag, duplicate item key: %r' % itemkey) + apeitems[itemkey] = item + if tagitemend - curpos: +- raise TagError, 'Corrupt tag, parsing complete but not at end ' \ +- 'of input: %i bytes remaining' % (len(data) - curpos) ++ raise TagError('Corrupt tag, parsing complete but not at end ' \ ++ 'of input: %i bytes remaining' % (len(data) - curpos)) + return apeitems + + def _parseid3tag(data): + '''Parse an ID3 tag and return a dictionary of tag fields''' + fields = {} +- for key,(start,end) in _id3fields.iteritems(): ++ for key,(start,end) in _id3fields.items(): + fields[key] = data[start:end].rstrip("\x00") + if data[125] == "\x00": + # ID3v1.1 tags have tracks +@@ -575,30 +576,30 @@ def _parseid3tag(data): + + def _printapeitems(apeitems): + '''Pretty print given APE Items''' +- items = apeitems.items() ++ items = 
list(apeitems.items()) + items.sort() +- print 'APE Tag\n-------' ++ print('APE Tag\n-------') + for key, value in items: + if value.readonly: + key = '[read only] %s' % key + if value.type == 'utf8': +- value = u', '.join([v.encode('ascii', 'replace') for v in value]) ++ value = ', '.join([v.encode('ascii', 'replace') for v in value]) + else: + key = '[%s] %s' % (value.type, key) + if value.type == 'binary': + value = '[binary data]' + else: + value = ', '.join(value) +- print '%s: %s' % (key, value) ++ print('%s: %s' % (key, value)) + + def _printid3items(tagfields): + '''Pretty print given ID3 Fields''' +- items = tagfields.items() ++ items = list(tagfields.items()) + items.sort() +- print 'ID3 Tag\n-------' ++ print('ID3 Tag\n-------') + for key, value in items: + if value: +- print '%s: %s' % (key, value) ++ print('%s: %s' % (key, value)) + + def _removeapeitems(apeitems, removefields): + '''Remove items from the APE tag''' +@@ -609,7 +610,7 @@ def _removeapeitems(apeitems, removefields): + def _restoredictcase(apeitems): + '''Restore the case of the dictionary keys for the ApeItems''' + fixeditems = {} +- for value in apeitems.itervalues(): ++ for value in apeitems.values(): + fixeditems[value.key] = value + return fixeditems + +@@ -634,13 +635,13 @@ def _tag(function, fil, action="update", *args, **kwar + try: + return function(fil, action, *args, **kwargs) + finally: +- if isinstance(origfil, basestring): ++ if isinstance(origfil, str): + # filename given as an argument, close file object + fil.close() + + def _updateapeitems(apeitems, fields): + '''Add/Update apeitems using data from fields''' +- for key, value in fields.iteritems(): ++ for key, value in fields.items(): + if isinstance(value, ApeItem): + apeitems[value.key.lower()] = value + else: +@@ -655,7 +656,7 @@ def _updateapetagcallback(apeitems, fields={}, removef + + def _updateid3fields(tagfields, fields): + '''Update ID3v1 tagfields using fields''' +- for field, value in fields.iteritems(): ++ 
for field, value in fields.items(): + if isinstance(field, str): + tagfields[field.lower()] = value + return tagfields +@@ -806,10 +807,10 @@ if __name__ == '__main__': + import sys + for filename in sys.argv[1:]: + if _isfile(filename): +- print '\n%s' % filename ++ print('\n%s' % filename) + try: + printtags(filename) + except TagError: +- print 'Missing APE or ID3 Tag' ++ print('Missing APE or ID3 Tag') + else: +- print "%s: file doesn't exist" % filename ++ print("%s: file doesn't exist" % filename) +--- test_ApeTag.py.orig 2007-11-07 01:12:54 UTC ++++ test_ApeTag.py +@@ -1,6 +1,6 @@ + #!/usr/bin/env python + import ApeTag +-import cStringIO ++import io + import unittest + import os.path + +@@ -29,7 +29,7 @@ def rr(string, position, characters, io = True): + return s + + def sio(string): +- x = cStringIO.StringIO() ++ x = io.StringIO() + x.write(string) + return x + diff --git a/audio/py-mpd/files/patch-2to3 b/audio/py-mpd/files/patch-2to3 new file mode 100644 index 00000000000..6c8cb06ad09 --- /dev/null +++ b/audio/py-mpd/files/patch-2to3 @@ -0,0 +1,11 @@ +--- mpd.py.orig 2010-12-14 00:46:05 UTC ++++ mpd.py +@@ -396,7 +396,7 @@ class MPDClient(object): + sock = socket.socket(af, socktype, proto) + sock.connect(sa) + return sock +- except socket.error, err: ++ except socket.error as err: + if sock is not None: + sock.close() + if err is not None: diff --git a/audio/qtractor/Makefile b/audio/qtractor/Makefile index e9aa9be8f48..fa8c7b6fff9 100644 --- a/audio/qtractor/Makefile +++ b/audio/qtractor/Makefile @@ -1,7 +1,7 @@ PORTNAME= qtractor DISTVERSIONPREFIX= qtractor_ DISTVERSION= 0_9_25 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio MASTER_SITES= https://github.com/rncbc/qtractor/archive/refs/tags/ DISTNAME= ${PORTNAME}_${DISTVERSION} diff --git a/audio/rhythmbox/Makefile b/audio/rhythmbox/Makefile index 4576726a716..5b8e7580bce 100644 --- a/audio/rhythmbox/Makefile +++ b/audio/rhythmbox/Makefile @@ -2,7 +2,7 @@ PORTNAME= rhythmbox PORTVERSION= 3.4.4 
-PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/audio/tagtool/Makefile b/audio/tagtool/Makefile index e641bf1d488..f96e3f59b6e 100644 --- a/audio/tagtool/Makefile +++ b/audio/tagtool/Makefile @@ -3,7 +3,7 @@ PORTNAME= tagtool PORTVERSION= 0.12.3 -PORTREVISION= 15 +PORTREVISION= 16 CATEGORIES= audio MASTER_SITES= SF diff --git a/audio/vst3sdk/Makefile b/audio/vst3sdk/Makefile index bf258b590ff..66f8cb0bd5e 100644 --- a/audio/vst3sdk/Makefile +++ b/audio/vst3sdk/Makefile @@ -1,6 +1,7 @@ PORTNAME= vst3sdk DISTVERSIONPREFIX= v DISTVERSION= 3.7.1_build_50-1 +PORTREVISION= 1 DISTVERSIONSUFFIX= -g8199057 CATEGORIES= audio diff --git a/audio/zrythm/Makefile b/audio/zrythm/Makefile index 427c49d824f..0ca94966b3b 100644 --- a/audio/zrythm/Makefile +++ b/audio/zrythm/Makefile @@ -1,7 +1,7 @@ PORTNAME= zrythm DISTVERSIONPREFIX= v DISTVERSION= 1.0.0-alpha.26.0.13 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= audio MAINTAINER= yuri@FreeBSD.org diff --git a/benchmarks/py-naarad/files/patch-2to3 b/benchmarks/py-naarad/files/patch-2to3 new file mode 100644 index 00000000000..51ee552bbb8 --- /dev/null +++ b/benchmarks/py-naarad/files/patch-2to3 @@ -0,0 +1,9 @@ +--- src/naarad/run_steps/local_cmd.py.orig 2015-04-23 21:55:51 UTC ++++ src/naarad/run_steps/local_cmd.py +@@ -82,5 +82,5 @@ class Local_Cmd(Run_Step): + if self.process.poll() is None: + self.process.kill() + logger.warning('Waited %d seconds for run_step to terminate. 
Killing now....', CONSTANTS.SECONDS_TO_KILL_AFTER_SIGTERM) +- except OSError, e: ++ except OSError as e: + logger.error('Error while trying to kill the subprocess: %s', e) diff --git a/biology/checkm/Makefile b/biology/checkm/Makefile index 302c1ba374d..6bb02b8266d 100644 --- a/biology/checkm/Makefile +++ b/biology/checkm/Makefile @@ -15,7 +15,7 @@ RUN_DEPENDS= ${PYNUMPY} \ ${PYTHON_PKGNAMEPREFIX}pysam>=0.8.3:biology/py-pysam@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}scipy>=0.9.0:science/py-scipy@${PY_FLAVOR} -USES= python:3.7+ +USES= dos2unix python:3.7+ USE_GITHUB= yes GH_ACCOUNT= Ecogenomics USE_PYTHON= distutils noflavors autoplist diff --git a/biology/checkm/files/patch-2to3 b/biology/checkm/files/patch-2to3 new file mode 100644 index 00000000000..04b37972c86 --- /dev/null +++ b/biology/checkm/files/patch-2to3 @@ -0,0 +1,648 @@ +--- checkm/binTools.py.orig 2022-03-15 18:25:01 UTC ++++ checkm/binTools.py +@@ -26,7 +26,7 @@ import gzip + + import numpy as np + +-from common import binIdFromFilename, checkFileExists, readDistribution, findNearest ++from .common import binIdFromFilename, checkFileExists, readDistribution, findNearest + from checkm.util.seqUtils import readFasta, writeFasta, baseCount + from checkm.genomicSignatures import GenomicSignatures + from checkm.prodigal import ProdigalGeneFeatureParser +@@ -123,34 +123,34 @@ class BinTools(): + seqId = line[1:].split(None, 1)[0] + + if seqId in seqIds: +- print ' [Warning] Sequence %s found multiple times in bin %s.' % (seqId, binId) ++ print(' [Warning] Sequence %s found multiple times in bin %s.' 
% (seqId, binId)) + seqIds.add(seqId) + + binSeqs[binId] = seqIds + + # check for sequences assigned to multiple bins + bDuplicates = False +- binIds = binSeqs.keys() +- for i in xrange(0, len(binIds)): +- for j in xrange(i + 1, len(binIds)): ++ binIds = list(binSeqs.keys()) ++ for i in range(0, len(binIds)): ++ for j in range(i + 1, len(binIds)): + seqInter = set(binSeqs[binIds[i]]).intersection(set(binSeqs[binIds[j]])) + + if len(seqInter) > 0: + bDuplicates = True +- print ' Sequences shared between %s and %s: ' % (binIds[i], binIds[j]) ++ print(' Sequences shared between %s and %s: ' % (binIds[i], binIds[j])) + for seqId in seqInter: +- print ' ' + seqId +- print '' ++ print(' ' + seqId) ++ print('') + + if not bDuplicates: +- print ' No sequences assigned to multiple bins.' ++ print(' No sequences assigned to multiple bins.') + + def gcDist(self, seqs): + """GC statistics for bin.""" + GCs = [] + gcTotal = 0 + basesTotal = 0 +- for _, seq in seqs.iteritems(): ++ for _, seq in seqs.items(): + a, c, g, t = baseCount(seq) + gc = g + c + bases = a + c + g + t +@@ -171,7 +171,7 @@ class BinTools(): + + codingBasesTotal = 0 + basesTotal = 0 +- for seqId, seq in seqs.iteritems(): ++ for seqId, seq in seqs.items(): + codingBases = prodigalParser.codingBases(seqId) + + CDs.append(float(codingBases) / len(seq)) +@@ -186,11 +186,11 @@ class BinTools(): + def binTetraSig(self, seqs, tetraSigs): + """Tetranucleotide signature for bin. 
""" + binSize = 0 +- for _, seq in seqs.iteritems(): ++ for _, seq in seqs.items(): + binSize += len(seq) + + bInit = True +- for seqId, seq in seqs.iteritems(): ++ for seqId, seq in seqs.items(): + weightedTetraSig = tetraSigs[seqId] * (float(len(seq)) / binSize) + if bInit: + binSig = weightedTetraSig +@@ -247,32 +247,32 @@ class BinTools(): + meanCD, deltaCDs, CDs = self.codingDensityDist(seqs, prodigalParser) + + # find keys into GC and CD distributions +- closestGC = findNearest(np.array(gcBounds.keys()), meanGC) +- sampleSeqLen = gcBounds[closestGC].keys()[0] ++ closestGC = findNearest(np.array(list(gcBounds.keys())), meanGC) ++ sampleSeqLen = list(gcBounds[closestGC].keys())[0] + d = gcBounds[closestGC][sampleSeqLen] +- gcLowerBoundKey = findNearest(d.keys(), (100 - distribution) / 2.0) +- gcUpperBoundKey = findNearest(d.keys(), (100 + distribution) / 2.0) ++ gcLowerBoundKey = findNearest(list(d.keys()), (100 - distribution) / 2.0) ++ gcUpperBoundKey = findNearest(list(d.keys()), (100 + distribution) / 2.0) + +- closestCD = findNearest(np.array(cdBounds.keys()), meanCD) +- sampleSeqLen = cdBounds[closestCD].keys()[0] ++ closestCD = findNearest(np.array(list(cdBounds.keys())), meanCD) ++ sampleSeqLen = list(cdBounds[closestCD].keys())[0] + d = cdBounds[closestCD][sampleSeqLen] +- cdLowerBoundKey = findNearest(d.keys(), (100 - distribution) / 2.0) ++ cdLowerBoundKey = findNearest(list(d.keys()), (100 - distribution) / 2.0) + +- tdBoundKey = findNearest(tdBounds[tdBounds.keys()[0]].keys(), distribution) ++ tdBoundKey = findNearest(list(tdBounds[list(tdBounds.keys())[0]].keys()), distribution) + + index = 0 +- for seqId, seq in seqs.iteritems(): ++ for seqId, seq in seqs.items(): + seqLen = len(seq) + + # find GC, CD, and TD bounds +- closestSeqLen = findNearest(gcBounds[closestGC].keys(), seqLen) ++ closestSeqLen = findNearest(list(gcBounds[closestGC].keys()), seqLen) + gcLowerBound = gcBounds[closestGC][closestSeqLen][gcLowerBoundKey] + gcUpperBound = 
gcBounds[closestGC][closestSeqLen][gcUpperBoundKey] + +- closestSeqLen = findNearest(cdBounds[closestCD].keys(), seqLen) ++ closestSeqLen = findNearest(list(cdBounds[closestCD].keys()), seqLen) + cdLowerBound = cdBounds[closestCD][closestSeqLen][cdLowerBoundKey] + +- closestSeqLen = findNearest(tdBounds.keys(), seqLen) ++ closestSeqLen = findNearest(list(tdBounds.keys()), seqLen) + tdBound = tdBounds[closestSeqLen][tdBoundKey] + + outlyingDists = [] +--- checkm/checkmData.py.orig 2022-03-15 18:25:01 UTC ++++ checkm/checkmData.py +@@ -85,11 +85,11 @@ class DBConfig(object): + """Work out if we have permission to write to the CheckM config before attempting to make changes""" + try: + open(self.configFile, 'a') +- except IOError, e: +- print "You do not seem to have permission to edit the checkm config file" +- print "located at %s" % self.configFile +- print "Please try again with updated privileges. Error was:\n" +- print e ++ except IOError as e: ++ print("You do not seem to have permission to edit the checkm config file") ++ print("located at %s" % self.configFile) ++ print("Please try again with updated privileges. Error was:\n") ++ print(e) + return False + return True + +@@ -167,28 +167,28 @@ class DBManager(mm.ManifestManager): + else: + path = os.path.abspath(os.path.expanduser(path)) + +- print "" ++ print("") + if os.path.exists(path): + # path exists + if os.access(path, os.W_OK): + # path is writable + path_set = True +- print "Path [%s] exists and you have permission to write to this folder." % path ++ print("Path [%s] exists and you have permission to write to this folder." % path) + else: +- print "Path [%s] exists but you do not have permission to write to this folder." % path ++ print("Path [%s] exists but you do not have permission to write to this folder." 
% path) + else: + # path does not exist, try to make it + "Path [%s] does not exist so I will attempt to create it" % path + try: + self.makeSurePathExists(path) +- print "Path [%s] has been created and you have permission to write to this folder." % path ++ print("Path [%s] has been created and you have permission to write to this folder." % path) + path_set = True + except Exception: +- print "Unable to make the folder, Error was: %s" % sys.exc_info()[0] ++ print("Unable to make the folder, Error was: %s" % sys.exc_info()[0]) + minimal = True + + # (re)make the manifest file +- print "(re) creating manifest file (please be patient)." ++ print("(re) creating manifest file (please be patient).") + self.createManifest(path, self.config.values["localManifestName"]) + + return path +@@ -196,8 +196,8 @@ class DBManager(mm.ManifestManager): + def checkPermissions(self): + """See if the user has permission to write to the data directory""" + if not os.access(self.config.values["dataRoot"], os.W_OK): +- print "You do not seem to have permission to edit the CheckM data folder" +- print "located at %s" % self.config.values["dataRoot"] ++ print("You do not seem to have permission to edit the CheckM data folder") ++ print("located at %s" % self.config.values["dataRoot"]) + return False + + return True +--- checkm/coverage.py.orig 2022-03-15 18:25:01 UTC ++++ checkm/coverage.py +@@ -62,7 +62,7 @@ class Coverage(): + binId = binIdFromFilename(binFile) + + seqs = readFasta(binFile) +- for seqId, seq in seqs.iteritems(): ++ for seqId, seq in seqs.items(): + seqIdToBinId[seqId] = binId + seqIdToSeqLen[seqId] = len(seq) + +@@ -97,12 +97,12 @@ class Coverage(): + print(header) + + # get length of all seqs +- for bamFile, seqIds in coverageInfo.iteritems(): +- for seqId in seqIds.keys(): ++ for bamFile, seqIds in coverageInfo.items(): ++ for seqId in list(seqIds.keys()): + seqIdToSeqLen[seqId] = seqIds[seqId].seqLen + + # write coverage stats for all scaffolds to file +- for seqId, 
seqLen in seqIdToSeqLen.iteritems(): ++ for seqId, seqLen in seqIdToSeqLen.items(): + rowStr = seqId + '\t' + seqIdToBinId.get(seqId, DefaultValues.UNBINNED) + '\t' + str(seqLen) + for bamFile in bamFiles: + bamId = binIdFromFilename(bamFile) +@@ -171,7 +171,7 @@ class Coverage(): + writeProc.join() + except: + # make sure all processes are terminated +- print traceback.format_exc() ++ print(traceback.format_exc()) + for p in workerProc: + p.terminate() + +@@ -271,16 +271,16 @@ class Coverage(): + if self.logger.getEffectiveLevel() <= logging.INFO: + sys.stderr.write('\n') + +- print '' +- print ' # total reads: %d' % totalReads +- print ' # properly mapped reads: %d (%.1f%%)' % (totalMappedReads, float(totalMappedReads) * 100 / totalReads) +- print ' # duplicate reads: %d (%.1f%%)' % (totalDuplicates, float(totalDuplicates) * 100 / totalReads) +- print ' # secondary reads: %d (%.1f%%)' % (totalSecondary, float(totalSecondary) * 100 / totalReads) +- print ' # reads failing QC: %d (%.1f%%)' % (totalFailedQC, float(totalFailedQC) * 100 / totalReads) +- print ' # reads failing alignment length: %d (%.1f%%)' % (totalFailedAlignLen, float(totalFailedAlignLen) * 100 / totalReads) +- print ' # reads failing edit distance: %d (%.1f%%)' % (totalFailedEditDist, float(totalFailedEditDist) * 100 / totalReads) +- print ' # reads not properly paired: %d (%.1f%%)' % (totalFailedProperPair, float(totalFailedProperPair) * 100 / totalReads) +- print '' ++ print('') ++ print(' # total reads: %d' % totalReads) ++ print(' # properly mapped reads: %d (%.1f%%)' % (totalMappedReads, float(totalMappedReads) * 100 / totalReads)) ++ print(' # duplicate reads: %d (%.1f%%)' % (totalDuplicates, float(totalDuplicates) * 100 / totalReads)) ++ print(' # secondary reads: %d (%.1f%%)' % (totalSecondary, float(totalSecondary) * 100 / totalReads)) ++ print(' # reads failing QC: %d (%.1f%%)' % (totalFailedQC, float(totalFailedQC) * 100 / totalReads)) ++ print(' # reads failing alignment length: %d 
(%.1f%%)' % (totalFailedAlignLen, float(totalFailedAlignLen) * 100 / totalReads)) ++ print(' # reads failing edit distance: %d (%.1f%%)' % (totalFailedEditDist, float(totalFailedEditDist) * 100 / totalReads)) ++ print(' # reads not properly paired: %d (%.1f%%)' % (totalFailedProperPair, float(totalFailedProperPair) * 100 / totalReads)) ++ print('') + + def parseCoverage(self, coverageFile): + """Read coverage information from file.""" +@@ -301,7 +301,7 @@ class Coverage(): + if seqId not in coverageStats[binId]: + coverageStats[binId][seqId] = {} + +- for i in xrange(3, len(lineSplit), 3): ++ for i in range(3, len(lineSplit), 3): + bamId = lineSplit[i] + coverage = float(lineSplit[i + 1]) + coverageStats[binId][seqId][bamId] = coverage +@@ -325,7 +325,7 @@ class Coverage(): + + # calculate mean coverage (weighted by scaffold length) + # for each bin under each BAM file +- for i in xrange(3, len(lineSplit), 3): ++ for i in range(3, len(lineSplit), 3): + bamId = lineSplit[i] + coverage = float(lineSplit[i + 1]) + binCoverages[binId][bamId].append(coverage) +@@ -341,13 +341,13 @@ class Coverage(): + + profiles = defaultdict(dict) + for binId in binStats: +- for bamId, stats in binStats[binId].iteritems(): ++ for bamId, stats in binStats[binId].items(): + binLength, meanBinCoverage = stats + coverages = binCoverages[binId][bamId] + + varCoverage = 0 + if len(coverages) > 1: +- varCoverage = mean(map(lambda x: (x - meanBinCoverage) ** 2, coverages)) ++ varCoverage = mean([(x - meanBinCoverage) ** 2 for x in coverages]) + + profiles[binId][bamId] = [meanBinCoverage, sqrt(varCoverage)] + +--- checkm/coverageWindows.py.orig 2022-03-15 18:25:01 UTC ++++ checkm/coverageWindows.py +@@ -188,10 +188,10 @@ class CoverageWindows(): + try: + end += windowSize + except: +- print '*****************' +- print end +- print windowSize +- print '******************' ++ print('*****************') ++ print(end) ++ print(windowSize) ++ print('******************') + + coverage = 
float(sum(readLoader.coverage)) / seqLen + +@@ -239,13 +239,13 @@ class CoverageWindows(): + if self.logger.getEffectiveLevel() <= logging.INFO: + sys.stderr.write('\n') + +- print '' +- print ' # total reads: %d' % totalReads +- print ' # properly mapped reads: %d (%.1f%%)' % (totalMappedReads, float(totalMappedReads) * 100 / totalReads) +- print ' # duplicate reads: %d (%.1f%%)' % (totalDuplicates, float(totalDuplicates) * 100 / totalReads) +- print ' # secondary reads: %d (%.1f%%)' % (totalSecondary, float(totalSecondary) * 100 / totalReads) +- print ' # reads failing QC: %d (%.1f%%)' % (totalFailedQC, float(totalFailedQC) * 100 / totalReads) +- print ' # reads failing alignment length: %d (%.1f%%)' % (totalFailedAlignLen, float(totalFailedAlignLen) * 100 / totalReads) +- print ' # reads failing edit distance: %d (%.1f%%)' % (totalFailedEditDist, float(totalFailedEditDist) * 100 / totalReads) +- print ' # reads not properly paired: %d (%.1f%%)' % (totalFailedProperPair, float(totalFailedProperPair) * 100 / totalReads) +- print '' ++ print('') ++ print(' # total reads: %d' % totalReads) ++ print(' # properly mapped reads: %d (%.1f%%)' % (totalMappedReads, float(totalMappedReads) * 100 / totalReads)) ++ print(' # duplicate reads: %d (%.1f%%)' % (totalDuplicates, float(totalDuplicates) * 100 / totalReads)) ++ print(' # secondary reads: %d (%.1f%%)' % (totalSecondary, float(totalSecondary) * 100 / totalReads)) ++ print(' # reads failing QC: %d (%.1f%%)' % (totalFailedQC, float(totalFailedQC) * 100 / totalReads)) ++ print(' # reads failing alignment length: %d (%.1f%%)' % (totalFailedAlignLen, float(totalFailedAlignLen) * 100 / totalReads)) ++ print(' # reads failing edit distance: %d (%.1f%%)' % (totalFailedEditDist, float(totalFailedEditDist) * 100 / totalReads)) ++ print(' # reads not properly paired: %d (%.1f%%)' % (totalFailedProperPair, float(totalFailedProperPair) * 100 / totalReads)) ++ print('') +--- checkm/manifestManager.py.orig 2022-03-15 18:25:01 UTC 
++++ checkm/manifestManager.py +@@ -47,8 +47,8 @@ __MANIFEST__ = ".dmanifest" + # system includes + import os + import hashlib +-import urllib2 +-import urllib ++import urllib.request, urllib.error, urllib.parse ++import urllib.request, urllib.parse, urllib.error + import shutil + import errno + +@@ -121,15 +121,15 @@ class ManifestManager(object): + source = "" + # first we assume it is remote + try: +- s_man = urllib2.urlopen(sourceManifestLocation + "/" + sourceManifestName, None, self.timeout) ++ s_man = urllib.request.urlopen(sourceManifestLocation + "/" + sourceManifestName, None, self.timeout) + source = sourceManifestLocation + "/" + except ValueError: + # then it is probably a file + s_man = open(os.path.join(sourceManifestLocation, sourceManifestName)) + source = os.path.join(sourceManifestLocation) + os.path.sep +- except urllib2.URLError: ++ except urllib.error.URLError: + # problems connecting to server, perhaps user is behind a proxy or firewall +- print "Error: failed to connect to server." ++ print("Error: failed to connect to server.") + return (None, None, None, None, None) + + first_line = True +@@ -140,11 +140,11 @@ class ManifestManager(object): + # get the type of the manifest + s_type = self.getManType(line) + if s_type != l_type: +- print "Error: type of source manifest (%s) does not match type of local manifest (%s)" % (s_type, l_type) ++ print("Error: type of source manifest (%s) does not match type of local manifest (%s)" % (s_type, l_type)) + return (None, None, None, None, None) + else: + # no type specified +- print "Error: type of source manifest is not specified. Is this a valid manifest file?" ++ print("Error: type of source manifest is not specified. 
Is this a valid manifest file?") + return (None, None, None, None, None) + + self.type = l_type +@@ -174,7 +174,7 @@ class ManifestManager(object): + deleted.append(fields[0]) + + # check for new files +- for f in source_man.keys(): ++ for f in list(source_man.keys()): + if source_man[f][2] == False: + if source_man[f][0] == '-': + addedDirs.append(f) +@@ -190,28 +190,28 @@ class ManifestManager(object): + modified_size += int(source_man[f][1]) + + if len(addedFiles) > 0: +- print "#------------------------------------------------------" +- print "# Source contains %d new file(s) (%s)" % (len(addedFiles), self.formatData(new_size)) ++ print("#------------------------------------------------------") ++ print("# Source contains %d new file(s) (%s)" % (len(addedFiles), self.formatData(new_size))) + for f in addedFiles: +- print "\t".join([self.formatData(int(source_man[f][1])), f]) ++ print("\t".join([self.formatData(int(source_man[f][1])), f])) + + if len(addedDirs) > 0: +- print "#------------------------------------------------------" +- print "# Source contains %d new folders(s)" % (len(addedDirs)) ++ print("#------------------------------------------------------") ++ print("# Source contains %d new folders(s)" % (len(addedDirs))) + for f in addedDirs: +- print f ++ print(f) + + if len(modified) > 0: +- print "#------------------------------------------------------" +- print "# Source contains %d modified file(s) (%s)" % (len(modified), self.formatData(modified_size)) ++ print("#------------------------------------------------------") ++ print("# Source contains %d modified file(s) (%s)" % (len(modified), self.formatData(modified_size))) + for f in modified: +- print f ++ print(f) + + if len(deleted) > 0: +- print "#------------------------------------------------------" +- print "# %d files have been deleted in the source:" % len(deleted) ++ print("#------------------------------------------------------") ++ print("# %d files have been deleted in the source:" % 
len(deleted)) + for f in deleted: +- print f ++ print(f) + else: + return (source, + [(a, source_man[a]) for a in addedFiles], +@@ -245,13 +245,13 @@ class ManifestManager(object): + for f in modified: + total_size += int(f[1][1]) + if total_size != 0: +- print "****************************************************************" +- print "%d new file(s) to be downloaded from source" % len(added_files) +- print "%d existing file(s) to be updated" % len(modified) +- print "%s will need to be downloaded" % self.formatData(total_size) ++ print("****************************************************************") ++ print("%d new file(s) to be downloaded from source" % len(added_files)) ++ print("%d existing file(s) to be updated" % len(modified)) ++ print("%s will need to be downloaded" % self.formatData(total_size)) + do_down = self.promptUserDownload() + if not do_down: +- print "Download aborted" ++ print("Download aborted") + + update_manifest = False + if do_down: +@@ -262,13 +262,13 @@ class ManifestManager(object): + self.makeSurePathExists(full_path) + for add in added_files: + full_path = os.path.abspath(os.path.join(localManifestLocation, add[0])) +- urllib.urlretrieve(source+add[0], full_path) ++ urllib.request.urlretrieve(source+add[0], full_path) + for modify in modified: + full_path = os.path.abspath(os.path.join(localManifestLocation, modify[0])) +- urllib.urlretrieve(source+modify[0], full_path) ++ urllib.request.urlretrieve(source+modify[0], full_path) + + if update_manifest: +- print "(re) creating manifest file (please be patient)" ++ print("(re) creating manifest file (please be patient)") + self.createManifest(localManifestLocation, manifestName=localManifestName) + + return True +@@ -303,19 +303,19 @@ class ManifestManager(object): + input_not_ok = True + minimal=False + valid_responses = {'Y':True,'N':False} +- vrs = ",".join([x.lower() for x in valid_responses.keys()]) ++ vrs = ",".join([x.lower() for x in list(valid_responses.keys())]) + 
while(input_not_ok): + if(minimal): +- option = raw_input("Download? ("+vrs+") : ").upper() ++ option = input("Download? ("+vrs+") : ").upper() + else: +- option = raw_input("Confirm you want to download this data\n" \ ++ option = input("Confirm you want to download this data\n" \ + "Changes *WILL* be permanent\n" \ + "Continue? ("+vrs+") : ").upper() + if(option in valid_responses): +- print "****************************************************************" ++ print("****************************************************************") + return valid_responses[option] + else: +- print "ERROR: unrecognised choice '"+option+"'" ++ print("ERROR: unrecognised choice '"+option+"'") + minimal = True + + def walk(self, parents, full_path, rel_path, dirs, files, skipFile=__MANIFEST__): +--- checkm/taxonParser.py.orig 2022-03-15 18:25:01 UTC ++++ checkm/taxonParser.py +@@ -73,8 +73,8 @@ class TaxonParser(): + numMarkers, numMarkerSets = markerSet.size() + pTable.add_row([rank, taxon, markerSet.numGenomes, numMarkers, numMarkerSets]) + +- print '' +- print pTable.get_string() ++ print('') ++ print(pTable.get_string()) + + def markerSet(self, rank, taxon, markerFile): + """Obtain specified taxonomic-specific marker set.""" +--- checkm/uniqueMarkers.py.orig 2022-03-15 18:25:01 UTC ++++ checkm/uniqueMarkers.py +@@ -51,7 +51,7 @@ def getOppositeRankSpecificTaxonId(cursor, *args): + query.append(' %s != \'%s\' ' % (ranks[len(args) - 1], args[-1])) + query.append(' %s IS NULL' % ranks[len(args)]) + query_string = 'AND'.join(query) +- print query_string ++ print(query_string) + result = cursor.execute('SELECT Id, "Count" FROM taxons WHERE %s' % query_string) + return result.fetchall() + +@@ -121,7 +121,7 @@ def doWork(args): + markers_from_others[Id] += count + + descriptive_markers = [] +- for marker_id, _ in marker_in_taxon_mapping.items(): ++ for marker_id, _ in list(marker_in_taxon_mapping.items()): + if marker_id in markers_from_others: + fraction_in_others = 
float(markers_from_others[marker_id]) / float(others_total_count) + if fraction_in_others <= args.exclude: +@@ -135,7 +135,7 @@ def doWork(args): + des_markers.append(getDescriptiveMarkers(cur, i)) + + for des_acc, des_name in des_markers: +- print des_acc, des_name ++ print(des_acc, des_name) + + if __name__ == '__main__': + +--- checkm/util/img.py.orig 2022-03-15 18:25:01 UTC ++++ checkm/util/img.py +@@ -195,7 +195,7 @@ class IMG(object): + genomeIdsOfInterest = set() + for genomeId in metadata: + bKeep = True +- for r in xrange(0, len(searchTaxa)): ++ for r in range(0, len(searchTaxa)): + if taxonStr == 'universal': + bKeep = True + elif taxonStr == 'prokaryotes' and (metadata[genomeId]['taxonomy'][0] == 'Bacteria' or metadata[genomeId]['taxonomy'][0] == 'Archaea'): +@@ -222,8 +222,8 @@ class IMG(object): + + def lineageStats(self, metadata, mostSpecificRank): + stats = {} +- for r in xrange(0, mostSpecificRank + 1): +- for _, data in metadata.iteritems(): ++ for r in range(0, mostSpecificRank + 1): ++ for _, data in metadata.items(): + taxaStr = ';'.join(data['taxonomy'][0:r + 1]) + stats[taxaStr] = stats.get(taxaStr, 0) + 1 + +@@ -231,9 +231,9 @@ class IMG(object): + + def lineagesSorted(self, metadata, mostSpecificRank=6): + lineages = [] +- for r in xrange(0, mostSpecificRank + 1): ++ for r in range(0, mostSpecificRank + 1): + taxa = set() +- for _, data in metadata.iteritems(): ++ for _, data in metadata.items(): + if 'unclassified' not in data['taxonomy'][0:r + 1]: + taxa.add(';'.join(data['taxonomy'][0:r + 1])) + +@@ -274,7 +274,7 @@ class IMG(object): + geneIdToFamilyIds[geneId].add(clusterId) + count[clusterId] = count.get(clusterId, 0) + 1 + +- for clusterId, c in count.iteritems(): ++ for clusterId, c in count.items(): + if clusterId not in table: + table[clusterId] = {} + table[clusterId][genomeId] = c +@@ -288,7 +288,7 @@ class IMG(object): + + def filterGeneCountTable(self, genomeIds, table, ubiquityThreshold=0.9, singleCopyThreshold=0.9): + 
idsToFilter = [] +- for pfamId, genomeCounts in table.iteritems(): ++ for pfamId, genomeCounts in table.items(): + ubiquity = 0 + singleCopy = 0 + for genomeId in genomeIds: +@@ -342,7 +342,7 @@ class IMG(object): + # are a few cases where this isn't tree (?) so only PFAMs/TIGRFAMs + # with GFF entries are considered. + familyIdToScaffoldIds = {} +- for pfamId, geneIds in pfamIdToGeneIds.iteritems(): ++ for pfamId, geneIds in pfamIdToGeneIds.items(): + scaffolds = [] + for geneId in geneIds: + scaffold = genePosition.get(geneId, None) +@@ -352,7 +352,7 @@ class IMG(object): + if scaffolds: + familyIdToScaffoldIds[pfamId] = scaffolds + +- for tigrId, geneIds in tigrIdToGeneIds.iteritems(): ++ for tigrId, geneIds in tigrIdToGeneIds.items(): + scaffolds = [] + for geneId in geneIds: + scaffold = genePosition.get(geneId, None) +@@ -362,9 +362,9 @@ class IMG(object): + if scaffold: + familyIdToScaffoldIds[tigrId] = scaffolds + except: +- print '[BUG]: __genomeIdToClusterScaffold' +- print sys.exc_info()[0] +- print genomeId, geneId, tigrId, pfamId ++ print('[BUG]: __genomeIdToClusterScaffold') ++ print(sys.exc_info()[0]) ++ print(genomeId, geneId, tigrId, pfamId) + sys.exit() + + return familyIdToScaffoldIds +@@ -400,7 +400,7 @@ class IMG(object): + seqs = readFasta(genomeFile) + + seqLens = {} +- for seqId, seq in seqs.iteritems(): ++ for seqId, seq in seqs.items(): + seqLens[seqId] = len(seq) + + return seqLens +@@ -462,7 +462,7 @@ class IMG(object): + # are a few cases where this isn't tree (?) so only PFAMs/TIGRFAMs + # with GFF entries are considered. 
+ familyIdToGenomePositions = {} +- for pfamId, geneIds in pfamIdToGeneIds.iteritems(): ++ for pfamId, geneIds in pfamIdToGeneIds.items(): + positions = [] + for geneId in geneIds: + position = genePosition.get(geneId, None) +@@ -472,7 +472,7 @@ class IMG(object): + if positions: + familyIdToGenomePositions[pfamId] = positions + +- for tigrId, geneIds in tigrIdToGeneIds.iteritems(): ++ for tigrId, geneIds in tigrIdToGeneIds.items(): + positions = [] + for geneId in geneIds: + position = genePosition.get(geneId, None) +@@ -482,9 +482,9 @@ class IMG(object): + if positions: + familyIdToGenomePositions[tigrId] = positions + except: +- print '[BUG]: __genomeFamilyPositions' +- print sys.exc_info()[0] +- print genomeId, geneId, tigrId, pfamId ++ print('[BUG]: __genomeFamilyPositions') ++ print(sys.exc_info()[0]) ++ print(genomeId, geneId, tigrId, pfamId) + sys.exit() + + return familyIdToGenomePositions diff --git a/biology/groopm/files/patch-2to3 b/biology/groopm/files/patch-2to3 new file mode 100644 index 00000000000..365000a5d09 --- /dev/null +++ b/biology/groopm/files/patch-2to3 @@ -0,0 +1,3414 @@ +--- groopm/PCA.py.orig 2014-11-26 01:01:33 UTC ++++ groopm/PCA.py +@@ -79,7 +79,7 @@ class PCA: + try: + self.sumvariance /= self.sumvariance[-1] + except: +- print len(A), len(self.sumvariance), len(self.eigen) ++ print(len(A), len(self.sumvariance), len(self.eigen)) + raise + + self.npc = np.searchsorted( self.sumvariance, fraction ) + 1 +@@ -127,13 +127,13 @@ class Center: + def __init__( self, A, axis=0, scale=True, verbose=1 ): + self.mean = A.mean(axis=axis) + if verbose: +- print "Center -= A.mean:", self.mean ++ print("Center -= A.mean:", self.mean) + A -= self.mean + if scale: + std = A.std(axis=axis) + self.std = np.where( std, std, 1. 
) + if verbose: +- print "Center /= A.std:", self.std ++ print("Center /= A.std:", self.std) + A /= self.std + else: + self.std = np.ones( A.shape[-1] ) +--- groopm/bin.py.orig 2015-03-06 07:01:36 UTC ++++ groopm/bin.py +@@ -59,8 +59,8 @@ from numpy import (around as np_around, + median as np_median, + std as np_std) + +-from ellipsoid import EllipsoidTool +-from groopmExceptions import ModeNotAppropriateException ++from .ellipsoid import EllipsoidTool ++from .groopmExceptions import ModeNotAppropriateException + + np.seterr(all='raise') + +@@ -155,7 +155,7 @@ class Bin: + """Combine the contigs of another bin with this one""" + # consume all the other bins rowIndices + if(verbose): +- print " BIN:",deadBin.id,"will be consumed by BIN:",self.id ++ print(" BIN:",deadBin.id,"will be consumed by BIN:",self.id) + self.rowIndices = np.concatenate([self.rowIndices, deadBin.rowIndices]) + self.binSize = self.rowIndices.shape[0] + +@@ -326,7 +326,7 @@ class Bin: + try: + return ET.getMinVolEllipse(bin_points, retA=retA) + except: +- print bin_points ++ print(bin_points) + raise + else: # minimum bounding ellipse of a point is 0 + if retA: +@@ -474,13 +474,13 @@ class Bin: + fig.set_size_inches(10,4) + plt.savefig(fileName,dpi=300) + except: +- print "Error saving image:", fileName, sys.exc_info()[0] ++ print("Error saving image:", fileName, sys.exc_info()[0]) + raise + else: + try: + plt.show() + except: +- print "Error showing image:", sys.exc_info()[0] ++ print("Error showing image:", sys.exc_info()[0]) + raise + del fig + +@@ -504,13 +504,13 @@ class Bin: + fig.set_size_inches(6,6) + plt.savefig(fileName+".png",dpi=300) + except: +- print "Error saving image:", fileName, sys.exc_info()[0] ++ print("Error saving image:", fileName, sys.exc_info()[0]) + raise + elif(show): + try: + plt.show() + except: +- print "Error showing image:", sys.exc_info()[0] ++ print("Error showing image:", sys.exc_info()[0]) + raise + plt.close(fig) + del fig +@@ -636,8 +636,8 @@ class Bin: + + 
If you pass through an EllipsoidTool then it will plot the minimum bounding ellipse as well! + """ +- disp_vals = np.array(zip([kPCA1[i] for i in self.rowIndices], +- [kPCA2[i] for i in self.rowIndices])) ++ disp_vals = np.array(list(zip([kPCA1[i] for i in self.rowIndices], ++ [kPCA2[i] for i in self.rowIndices]))) + disp_lens = np.array([np.sqrt(contigLengths[i]) for i in self.rowIndices]) + + # reshape +@@ -695,7 +695,7 @@ class Bin: + data = [str(self.id), str(isLikelyChimeric[self.id]), str(self.totalBP), str(self.binSize), gcm_str, gcs_str] + cov_mean = np.mean(covProfiles[self.rowIndices], axis=0) + cov_std = np.std(covProfiles[self.rowIndices], axis=0) +- for i in xrange(0, len(cov_mean)): ++ for i in range(0, len(cov_mean)): + data.append('%.4f' % cov_mean[i]) + data.append('%.4f' % cov_std[i]) + stream.write(separator.join(data)+"\n") +--- groopm/binManager.py.orig 2015-03-06 07:02:49 UTC ++++ groopm/binManager.py +@@ -85,11 +85,11 @@ from scipy.stats import f_oneway, distributions + from scipy.cluster.vq import kmeans,vq + + # GroopM imports +-from profileManager import ProfileManager +-from bin import Bin, mungeCbar +-import groopmExceptions as ge +-from groopmUtils import makeSurePathExists +-from ellipsoid import EllipsoidTool ++from .profileManager import ProfileManager ++from .bin import Bin, mungeCbar ++from . import groopmExceptions as ge ++from .groopmUtils import makeSurePathExists ++from .ellipsoid import EllipsoidTool + + np_seterr(all='raise') + +@@ -182,15 +182,15 @@ class BinManager: + if self.PM.numStoits == 3: + self.PM.transformedCP = self.PM.covProfiles + else: +- print "Number of stoits != 3. You need to transform" ++ print("Number of stoits != 3. 
You need to transform") + self.PM.transformCP(timer, silent=silent) + if not silent: +- print " Making bin objects" ++ print(" Making bin objects") + self.makeBins(self.getBinMembers()) + if not silent: +- print " Loaded %d bins from database" % len(self.bins) ++ print(" Loaded %d bins from database" % len(self.bins)) + if not silent: +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + + def getBinMembers(self): +@@ -210,7 +210,7 @@ class BinManager: + + # we need to get the largest BinId in use + if len(bin_members) > 0: +- self.nextFreeBinId = np_max(bin_members.keys()) ++ self.nextFreeBinId = np_max(list(bin_members.keys())) + return bin_members + + def makeBins(self, binMembers, zeroIsBin=False): +@@ -224,8 +224,8 @@ class BinManager: + self.bins[bid] = Bin(np_array(binMembers[bid]), bid, self.PM.scaleFactor-1) + self.bins[bid].makeBinDist(self.PM.transformedCP, self.PM.averageCoverages, self.PM.kmerNormPC1, self.PM.kmerPCs, self.PM.contigGCs, self.PM.contigLengths) + if len(invalid_bids) != 0: +- print "MT bins!" +- print invalid_bids ++ print("MT bins!") ++ print(invalid_bids) + exit(-1) + + def saveBins(self, binAssignments={}, nuke=False): +@@ -384,7 +384,7 @@ class BinManager: + all_links[key] = links[link] + + # sort and return +- return sorted(all_links.iteritems(), key=itemgetter(1), reverse=True) ++ return sorted(iter(all_links.items()), key=itemgetter(1), reverse=True) + + def getWithinLinkProfiles(self): + """Determine the average number of links between contigs for all bins""" +@@ -468,7 +468,7 @@ class BinManager: + (bin_assignment_update, bids) = self.getSplitties(bid, n, mode) + + if(auto and saveBins): +- print 'here!!!!' 
++ print('here!!!!') + # charge on through + self.deleteBins([bids[0]], force=True) # delete the combined bin + # save new bins +@@ -536,12 +536,12 @@ class BinManager: + parts = 0 + while(not_got_parts): + try: +- parts = int(raw_input("Enter new number of parts:")) ++ parts = int(input("Enter new number of parts:")) + except ValueError: +- print "You need to enter an integer value!" ++ print("You need to enter an integer value!") + parts = 0 + if(1 == parts): +- print "Don't be a silly sausage!" ++ print("Don't be a silly sausage!") + elif(0 != parts): + not_got_parts = False + self.split(bid, +@@ -664,7 +664,7 @@ class BinManager: + F_cutoff = distributions.f.ppf(confidence, 2, len(dist1)+len(dist2)-2) + F_value = f_oneway(dist1,dist2)[0] + if tag != "": +- print "%s [V: %f, C: %f]" % (tag, F_value, F_cutoff) ++ print("%s [V: %f, C: %f]" % (tag, F_value, F_cutoff)) + return F_value < F_cutoff + + def merge(self, bids, auto=False, manual=False, newBid=False, saveBins=False, verbose=False, printInstructions=True, use_elipses=True): +@@ -715,11 +715,11 @@ class BinManager: + self.deleteBins([tmp_bin.id], force=True) + user_option = self.promptOnMerge(bids=[parent_bin.id,dead_bin.id]) + if(user_option == "N"): +- print "Merge skipped" ++ print("Merge skipped") + ret_val = 1 + continue_merge=False + elif(user_option == "Q"): +- print "All mergers skipped" ++ print("All mergers skipped") + return 0 + else: + ret_val = 2 +@@ -799,7 +799,7 @@ class BinManager: + try: + del self.PM.binnedRowIndices[row_index] + except KeyError: +- print bid, row_index, "FUNG" ++ print(bid, row_index, "FUNG") + self.PM.binIds[row_index] = 0 + + bin_assignment_update[row_index] = 0 +@@ -826,7 +826,7 @@ class BinManager: + # UI + + def printMergeInstructions(self): +- raw_input( "****************************************************************\n" ++ input( "****************************************************************\n" + " MERGING INSTRUCTIONS - PLEASE READ CAREFULLY\n" + 
"****************************************************************\n" + " The computer cannot always be trusted to perform bin mergers\n" +@@ -836,10 +836,10 @@ class BinManager: + " to continue with the merging operation.\n" + " The image on the far right shows the bins after merging\n" + " Press any key to produce plots...") +- print "****************************************************************" ++ print("****************************************************************") + + def printSplitInstructions(self): +- raw_input( "****************************************************************\n" ++ input( "****************************************************************\n" + " SPLITTING INSTRUCTIONS - PLEASE READ CAREFULLY\n" + "****************************************************************\n" + " The computer cannot always be trusted to perform bin splits\n" +@@ -848,7 +848,7 @@ class BinManager: + " be split. Look carefully at each plot and then close the plot\n" + " to continue with the splitting operation.\n\n" + " Press any key to produce plots...") +- print "****************************************************************" ++ print("****************************************************************") + + def getPlotterMergeIds(self): + """Prompt the user for ids to be merged and check that it's all good""" +@@ -856,7 +856,7 @@ class BinManager: + ret_bids = [] + while(input_not_ok): + ret_bids = [] +- option = raw_input("Please enter 'space' separated bin Ids or 'q' to quit: ") ++ option = input("Please enter 'space' separated bin Ids or 'q' to quit: ") + if(option.upper() == 'Q'): + return [] + bids = option.split(" ") +@@ -866,13 +866,13 @@ class BinManager: + i_bid = int(bid) + # check that it's in the bins list + if(i_bid not in self.bins): +- print "**Error: bin",bid,"not found" ++ print("**Error: bin",bid,"not found") + input_not_ok = True + break + input_not_ok = False + ret_bids.append(i_bid) + except ValueError: +- print "**Error: invalid value:", bid 
++ print("**Error: invalid value:", bid) + input_not_ok = True + break + return ret_bids +@@ -889,19 +889,19 @@ class BinManager: + bin_str += " and "+str(bids[i]) + while(input_not_ok): + if(minimal): +- option = raw_input(" Merge? ("+vrs+") : ") ++ option = input(" Merge? ("+vrs+") : ") + else: +- option = raw_input(" ****WARNING**** About to merge bins"+bin_str+"\n" \ ++ option = input(" ****WARNING**** About to merge bins"+bin_str+"\n" \ + " If you continue you *WILL* overwrite existing bins!\n" \ + " You have been shown a 3d plot of the bins to be merged.\n" \ + " Continue only if you're sure this is what you want to do!\n" \ + " y = yes, n = no, q = no and quit merging\n" \ + " Merge? ("+vrs+") : ") + if(option.upper() in valid_responses): +- print "****************************************************************" ++ print("****************************************************************") + return option.upper() + else: +- print "Error, unrecognised choice '"+option.upper()+"'" ++ print("Error, unrecognised choice '"+option.upper()+"'") + minimal = True + + def promptOnSplit(self, parts, mode, minimal=False): +@@ -911,9 +911,9 @@ class BinManager: + vrs = ",".join([str.lower(str(x)) for x in valid_responses]) + while(input_not_ok): + if(minimal): +- option = raw_input(" Split? ("+vrs+") : ") ++ option = input(" Split? ("+vrs+") : ") + else: +- option = raw_input(" ****WARNING**** About to split bin into "+str(parts)+" parts\n" \ ++ option = input(" ****WARNING**** About to split bin into "+str(parts)+" parts\n" \ + " If you continue you *WILL* overwrite existing bins!\n" \ + " You have been shown a 3d plot of the bin after splitting.\n" \ + " Continue only if you're sure this is what you want to do!\n" \ +@@ -923,13 +923,13 @@ class BinManager: + " Split? 
("+vrs+") : ") + if(option.upper() in valid_responses): + if(option.upper() == 'K' and mode.upper() == 'KMER' or option.upper() == 'C' and mode.upper() == 'COV' or option.upper() == 'L' and mode.upper() == 'LEN'): +- print "Error, you are already using that profile to split!" ++ print("Error, you are already using that profile to split!") + minimal=True + else: +- print "****************************************************************" ++ print("****************************************************************") + return option.upper() + else: +- print "Error, unrecognised choice '"+option.upper()+"'" ++ print("Error, unrecognised choice '"+option.upper()+"'") + minimal = True + + def promptOnDelete(self, bids, minimal=False): +@@ -940,19 +940,19 @@ class BinManager: + bids_str = ",".join([str.lower(str(x)) for x in bids]) + while(input_not_ok): + if(minimal): +- option = raw_input(" Delete? ("+vrs+") : ") ++ option = input(" Delete? ("+vrs+") : ") + else: +- option = raw_input(" ****WARNING**** About to delete bin(s):\n" \ ++ option = input(" ****WARNING**** About to delete bin(s):\n" \ + " "+bids_str+"\n" \ + " If you continue you *WILL* overwrite existing bins!\n" \ + " Continue only if you're sure this is what you want to do!\n" \ + " y = yes, n = no\n"\ + " Delete? 
("+vrs+") : ") + if(option.upper() in valid_responses): +- print "****************************************************************" ++ print("****************************************************************") + return option.upper() + else: +- print "Error, unrecognised choice '"+option.upper()+"'" ++ print("Error, unrecognised choice '"+option.upper()+"'") + minimal = True + + #------------------------------------------------------------------------------ +@@ -1039,10 +1039,10 @@ class BinManager: + + # find the mean and stdev + if(not makeKillList): +- return (np_mean(np_array(Ms.values())), np_std(np_array(Ms.values())), np_median(np_array(Ss.values())), np_std(np_array(Ss.values()))) ++ return (np_mean(np_array(list(Ms.values()))), np_std(np_array(list(Ms.values()))), np_median(np_array(list(Ss.values()))), np_std(np_array(list(Ss.values())))) + + else: +- cutoff = np_mean(np_array(Ms.values())) + tolerance * np_std(np_array(Ms.values())) ++ cutoff = np_mean(np_array(list(Ms.values()))) + tolerance * np_std(np_array(list(Ms.values()))) + kill_list = [] + for bid in Ms: + if(Ms[bid] > cutoff): +@@ -1054,7 +1054,7 @@ class BinManager: + + return a list of potentially confounding kmer indices + """ +- print " Measuring kmer type variances" ++ print(" Measuring kmer type variances") + means = np_array([]) + stdevs = np_array([]) + bids = np_array([]) +@@ -1094,12 +1094,12 @@ class BinManager: + return_indices.append(sort_within_indices[i]) + + if(plot): +- print "BETWEEN" ++ print("BETWEEN") + for i in range(0,number_to_trim): +- print names[sort_between_indices[i]] +- print "WITHIN" ++ print(names[sort_between_indices[i]]) ++ print("WITHIN") + for i in range(0,number_to_trim): +- print names[sort_within_indices[i]] ++ print(names[sort_within_indices[i]]) + + plt.figure(1) + plt.subplot(211) +@@ -1126,7 +1126,7 @@ class BinManager: + stdout = open(fileName, 'w') + self.printInner(outFormat, stdout) + except: +- print "Error diverting stout to file:", fileName, 
exc_info()[0] ++ print("Error diverting stout to file:", fileName, exc_info()[0]) + raise + else: + self.printInner(outFormat) +@@ -1139,14 +1139,14 @@ class BinManager: + stream.write(separator.join(["#\"bid\"","\"cid\"","\"length\"","\"GC\""])+"\n") + elif(outFormat == 'bins'): + header = ["\"bin id\"","\"Likely chimeric\"","\"length (bp)\"","\"# seqs\"","\"GC mean\"","\"GC std\""] +- for i in xrange(0, len(self.PM.covProfiles[0])): ++ for i in range(0, len(self.PM.covProfiles[0])): + header.append("\"Coverage " + str(i+1) + " mean\"") + header.append("\"Coverage " + str(i+1) + " std\"") + stream.write(separator.join(header) + "\n") + elif(outFormat == 'full'): + pass + else: +- print "Error: Unrecognised format:", outFormat ++ print("Error: Unrecognised format:", outFormat) + return + + for bid in self.getBids(): +@@ -1224,13 +1224,13 @@ class BinManager: + try: + plt.savefig(fileName,dpi=300) + except: +- print "Error saving image:", fileName, exc_info()[0] ++ print("Error saving image:", fileName, exc_info()[0]) + raise + else: + try: + plt.show() + except: +- print "Error showing image:", exc_info()[0] ++ print("Error showing image:", exc_info()[0]) + raise + + plt.close(fig) +@@ -1344,7 +1344,7 @@ class BinManager: + try: + plt.show() + except: +- print "Error showing image:", exc_info()[0] ++ print("Error showing image:", exc_info()[0]) + raise + + plt.close(fig) +@@ -1369,10 +1369,10 @@ class BinManager: + self.bins[bid].makeBinDist(self.PM.transformedCP, self.PM.averageCoverages, self.PM.kmerNormPC1, self.PM.kmerPCs, self.PM.contigGCs, self.PM.contigLengths) + + if(sideBySide): +- print "Plotting side by side" +- self.plotSideBySide(self.bins.keys(), tag=FNPrefix, ignoreContigLengths=ignoreContigLengths) ++ print("Plotting side by side") ++ self.plotSideBySide(list(self.bins.keys()), tag=FNPrefix, ignoreContigLengths=ignoreContigLengths) + else: +- print "Plotting bins" ++ print("Plotting bins") + for bid in self.getBids(): + if folder != '': + 
self.bins[bid].plotBin(self.PM.transformedCP, self.PM.contigGCs, self.PM.kmerNormPC1, +@@ -1387,7 +1387,7 @@ class BinManager: + def plotBinCoverage(self, plotEllipses=False, plotContigLengs=False, printID=False): + """Make plots of all the bins""" + +- print "Plotting first 3 stoits in untransformed coverage space" ++ print("Plotting first 3 stoits in untransformed coverage space") + + # plot contigs in coverage space + fig = plt.figure() +@@ -1452,7 +1452,7 @@ class BinManager: + plt.show() + plt.close(fig) + except: +- print "Error showing image", exc_info()[0] ++ print("Error showing image", exc_info()[0]) + raise + + del fig +@@ -1504,13 +1504,13 @@ class BinManager: + fig.set_size_inches(12,6) + plt.savefig(fileName,dpi=300) + except: +- print "Error saving image:", fileName, exc_info()[0] ++ print("Error saving image:", fileName, exc_info()[0]) + raise + elif(show): + try: + plt.show() + except: +- print "Error showing image:", exc_info()[0] ++ print("Error showing image:", exc_info()[0]) + raise + plt.close(fig) + del fig +@@ -1554,7 +1554,7 @@ class BinManager: + plt.show() + plt.close(fig) + except: +- print "Error showing image", exc_info()[0] ++ print("Error showing image", exc_info()[0]) + raise + del fig + +@@ -1563,7 +1563,7 @@ class BinManager: + (bin_centroid_points, _bin_centroid_colors, bin_centroid_gc, _bids) = self.findCoreCentres(processChimeric=showChimeric) + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') +- print bin_centroid_gc ++ print(bin_centroid_gc) + sc = ax.scatter(bin_centroid_points[:,0], bin_centroid_points[:,1], bin_centroid_points[:,2], edgecolors='k', c=bin_centroid_gc, cmap=self.PM.colorMapGC, vmin=0.0, vmax=1.0) + sc.set_edgecolors = sc.set_facecolors = lambda *args:None # disable depth transparency effect + +@@ -1588,7 +1588,7 @@ class BinManager: + plt.show() + plt.close(fig) + except: +- print "Error showing image", exc_info()[0] ++ print("Error showing image", exc_info()[0]) + raise + del fig + +--- 
groopm/cluster.py.orig 2015-03-06 04:42:51 UTC ++++ groopm/cluster.py +@@ -95,11 +95,11 @@ from scipy.spatial.distance import pdist, squareform, + from scipy.misc import imsave + + # GroopM imports +-from profileManager import ProfileManager +-from binManager import BinManager +-from refine import GrubbsTester, RefineEngine +-from PCA import PCA, Center +-from groopmExceptions import BinNotFoundException ++from .profileManager import ProfileManager ++from .binManager import BinManager ++from .refine import GrubbsTester, RefineEngine ++from .PCA import PCA, Center ++from .groopmExceptions import BinNotFoundException + + np_seterr(all='raise') + +@@ -160,22 +160,22 @@ class ClusterEngine: + vrs = ",".join([str.lower(str(x)) for x in valid_responses]) + while(input_not_ok): + if(minimal): +- option = raw_input(" Overwrite? ("+vrs+") : ") ++ option = input(" Overwrite? ("+vrs+") : ") + else: +- option = raw_input(" ****WARNING**** Database: '"+self.PM.dbFileName+"' has already been clustered.\n" \ ++ option = input(" ****WARNING**** Database: '"+self.PM.dbFileName+"' has already been clustered.\n" \ + " If you continue you *MAY* overwrite existing bins!\n" \ + " Overwrite? 
("+vrs+") : ") + if(option.upper() in valid_responses): +- print "****************************************************************" ++ print("****************************************************************") + if(option.upper() == "N"): +- print "Operation cancelled" ++ print("Operation cancelled") + return False + else: + break + else: +- print "Error, unrecognised choice '"+option.upper()+"'" ++ print("Error, unrecognised choice '"+option.upper()+"'") + minimal = True +- print "Will Overwrite database",self.PM.dbFileName ++ print("Will Overwrite database",self.PM.dbFileName) + return True + + #------------------------------------------------------------------------------ +@@ -189,10 +189,10 @@ class ClusterEngine: + + # get some data + self.PM.loadData(self.timer, "length >= "+str(coreCut)) +- print " %s" % self.timer.getTimeStamp() ++ print(" %s" % self.timer.getTimeStamp()) + + # transform the data +- print " Loading transformed data" ++ print(" Loading transformed data") + self.PM.transformCP(self.timer) + # plot the transformed space (if we've been asked to...) + #if(self.debugPlots >= 3): +@@ -201,15 +201,15 @@ class ClusterEngine: + # now we can make this guy + self.TSpan = np_mean([np_norm(self.PM.corners[i] - self.PM.TCentre) for i in range(self.PM.numStoits)]) + +- print " %s" % self.timer.getTimeStamp() ++ print(" %s" % self.timer.getTimeStamp()) + + # cluster and bin! +- print "Create cores" ++ print("Create cores") + self.initialiseCores(kmerThreshold, coverageThreshold) +- print " %s" % self.timer.getTimeStamp() ++ print(" %s" % self.timer.getTimeStamp()) + + # condense cores +- print "Refine cores [begin: %d]" % len(self.BM.bins) ++ print("Refine cores [begin: %d]" % len(self.BM.bins)) + if self.finalPlot: + prfx = "CORE" + else: +@@ -217,9 +217,9 @@ class ClusterEngine: + self.RE.refineBins(self.timer, auto=True, saveBins=False, plotFinal=prfx, gf=gf) + + # Now save all the stuff to disk! 
+- print "Saving bins" ++ print("Saving bins") + self.BM.saveBins(nuke=True) +- print " %s" % self.timer.getTimeStamp() ++ print(" %s" % self.timer.getTimeStamp()) + + def initialiseCores(self, kmerThreshold, coverageThreshold): + """Process contigs and form CORE bins""" +@@ -230,8 +230,8 @@ class ClusterEngine: + # We can make a heat map and look for hot spots + self.populateImageMaps() + sub_counter = 0 +- print " .... .... .... .... .... .... .... .... .... ...." +- print "%4d" % sub_counter, ++ print(" .... .... .... .... .... .... .... .... .... ....") ++ print("%4d" % sub_counter, end=' ') + new_line_counter = 0 + num_bins = 0 + +@@ -303,13 +303,13 @@ class ClusterEngine: + self.updatePostBin(bin) + + new_line_counter += 1 +- print "% 4d" % bin.binSize, ++ print("% 4d" % bin.binSize, end=' ') + + # make the printing prettier + if(new_line_counter > 9): + new_line_counter = 0 + sub_counter += 10 +- print "\n%4d" % sub_counter, ++ print("\n%4d" % sub_counter, end=' ') + + if(self.debugPlots >= 1): + #***slow plot! +@@ -317,7 +317,7 @@ class ClusterEngine: + + except BinNotFoundException: pass + +- print "\n .... .... .... .... .... .... .... .... .... ...." ++ print("\n .... .... .... .... .... .... .... .... .... 
....") + + def findNewClusterCenters(self, kmerThreshold, coverageThreshold): + """Find a putative cluster""" +@@ -498,32 +498,32 @@ class ClusterEngine: + k_dist_matrix = squareform(pdist(k_dat, 'cityblock')) + k_radius = np_median(np_sort(k_dist_matrix)[:,eps_neighbours]) + except MemoryError: +- print "\n" +- print '*******************************************************************************' +- print '********************************* ERROR *********************************' +- print '*******************************************************************************' +- print 'GroopM is attempting to do some maths on a putative bin which contains:' +- print +- print '\t\t%d contigs' % (len(rowIndices)) +- print +- print 'This has caused your machine to run out of memory.' +- print 'The most likely cause is that your samples are very different from each other.' +- print 'You can confirm this by running:' +- print +- print '\t\tgroopm explore -m allcontigs %s' % self.PM.dbFileName +- print +- print 'If you notice only vertical "spears" of contigs at the corners of the plot then' +- print 'this means that your samples are very different and you are not getting a good' +- print 'mapping from all samples to all contigs. You may get more mileage by assembling' +- print 'and binning your samples separately.' +- print +- print 'If you notice "clouds" of contigs then congratulations! You have found a bug.' +- print 'Please let me know at "%s or via github.com/minillinim/GroopM' % __email__ +- print +- print 'GroopM is aborting... 
sorry' +- print +- print '*******************************************************************************' +- print "\n" ++ print("\n") ++ print('*******************************************************************************') ++ print('********************************* ERROR *********************************') ++ print('*******************************************************************************') ++ print('GroopM is attempting to do some maths on a putative bin which contains:') ++ print() ++ print('\t\t%d contigs' % (len(rowIndices))) ++ print() ++ print('This has caused your machine to run out of memory.') ++ print('The most likely cause is that your samples are very different from each other.') ++ print('You can confirm this by running:') ++ print() ++ print('\t\tgroopm explore -m allcontigs %s' % self.PM.dbFileName) ++ print() ++ print('If you notice only vertical "spears" of contigs at the corners of the plot then') ++ print('this means that your samples are very different and you are not getting a good') ++ print('mapping from all samples to all contigs. You may get more mileage by assembling') ++ print('and binning your samples separately.') ++ print() ++ print('If you notice "clouds" of contigs then congratulations! You have found a bug.') ++ print('Please let me know at "%s or via github.com/minillinim/GroopM' % __email__) ++ print() ++ print('GroopM is aborting... 
sorry') ++ print() ++ print('*******************************************************************************') ++ print("\n") + exit(-1) + + # find nearest neighbours to each point in whitened coverage space, +@@ -1341,7 +1341,7 @@ class HoughPartitioner: + diffs *= (len(diffs)-1) + + # make it 2D +- t_data = np_array(zip(diffs, np_arange(d_len))) ++ t_data = np_array(list(zip(diffs, np_arange(d_len)))) + ###MMM FIX + #im_shape = (int(np_max(t_data, axis=0)[0]+1), d_len) + im_shape = (d_len, d_len) +@@ -1532,7 +1532,7 @@ class HoughPartitioner: + if imgTag is not None: + # make a pretty picture + fff = np_ones(imShape) * 255 +- for p in found_line.keys(): ++ for p in list(found_line.keys()): + fff[p[0],p[1]] = 220 + for p in tData: + fff[p[0],p[1]] = 0 +@@ -1573,7 +1573,7 @@ class HoughPartitioner: + if real_index not in assigned: + tmp[real_index] = None + assigned[real_index] = None +- centre = np_array(tmp.keys()) ++ centre = np_array(list(tmp.keys())) + if len(centre) > 0: + return np_array([centre]) + # nuffin +@@ -1593,7 +1593,7 @@ class HoughPartitioner: + if real_index not in assigned: + tmp[real_index] = None + assigned[real_index] = None +- centre = np_array(tmp.keys()) ++ centre = np_array(list(tmp.keys())) + + rets = [] + +@@ -1609,8 +1609,8 @@ class HoughPartitioner: + tmp[real_index] = None + assigned[real_index] = None + +- if len(tmp.keys()) > 0: +- rets.append(np_array(tmp.keys())) ++ if len(list(tmp.keys())) > 0: ++ rets.append(np_array(list(tmp.keys()))) + + else: + # otherwise we keep working with ranges +@@ -1643,8 +1643,8 @@ class HoughPartitioner: + tmp[real_index] = None + assigned[real_index] = None + +- if len(tmp.keys()) > 0: +- rets.append(np_array(tmp.keys())) ++ if len(list(tmp.keys())) > 0: ++ rets.append(np_array(list(tmp.keys()))) + else: + right_p = self.recursiveSelect(tData, + imShape, +@@ -1723,40 +1723,40 @@ class HoughPartitioner: + iry = half_rows + int(r/dr) + accumulator[iry, theta_index] -= 1 + """ +- cos_sin_array = 
np_array(zip([np_sin(dth * theta_index) for theta_index in range(cols)], +- [np_cos(dth * theta_index) for theta_index in range(cols)])) ++ cos_sin_array = np_array(list(zip([np_sin(dth * theta_index) for theta_index in range(cols)], ++ [np_cos(dth * theta_index) for theta_index in range(cols)]))) + Rs = np_array(np_sum(np_reshape([p * cos_sin_array for p in data], (d_len*cols,2)), + axis=1)/dr).astype('int') + half_rows +- Cs = np_array(range(cols)*d_len) ++ Cs = np_array(list(range(cols))*d_len) + + try: + flat_indices = Rs * cols + Cs + except ValueError: +- print "\n" +- print '*******************************************************************************' +- print '********************************* ERROR *********************************' +- print '*******************************************************************************' +- print 'GroopM is attempting to do some maths on a putative bin which contains' +- print 'too many contigs.' +- print +- print 'This has resulted in a buffer overflow in the numpy library... oops.' +- print 'The most likely cause is that your samples are very different from each other.' +- print 'You can confirm this by running:' +- print +- print '\t\tgroopm explore -c 0 -m allcontigs ' +- print +- print 'If you notice only vertical "spears" of contigs at the corners of the plot then' +- print 'this means that your samples are very different and you are not getting a good' +- print 'mapping from all samples to all contigs. You may get more mileage by assembling' +- print 'and binning your samples separately.' +- print +- print 'If you notice "clouds" of contigs then congratulations! You have found a bug.' +- print 'Please let me know at "%s or via github.com/minillinim/GroopM' % __email__ +- print +- print 'GroopM is aborting... 
sorry' +- print +- print '*******************************************************************************' +- print "\n" ++ print("\n") ++ print('*******************************************************************************') ++ print('********************************* ERROR *********************************') ++ print('*******************************************************************************') ++ print('GroopM is attempting to do some maths on a putative bin which contains') ++ print('too many contigs.') ++ print() ++ print('This has resulted in a buffer overflow in the numpy library... oops.') ++ print('The most likely cause is that your samples are very different from each other.') ++ print('You can confirm this by running:') ++ print() ++ print('\t\tgroopm explore -c 0 -m allcontigs ') ++ print() ++ print('If you notice only vertical "spears" of contigs at the corners of the plot then') ++ print('this means that your samples are very different and you are not getting a good') ++ print('mapping from all samples to all contigs. You may get more mileage by assembling') ++ print('and binning your samples separately.') ++ print() ++ print('If you notice "clouds" of contigs then congratulations! You have found a bug.') ++ print('Please let me know at "%s or via github.com/minillinim/GroopM' % __email__) ++ print() ++ print('GroopM is aborting... sorry') ++ print() ++ print('*******************************************************************************') ++ print("\n") + exit(-1) + + # update the accumulator with integer decrements +--- groopm/groopm.py.orig 2014-11-26 01:01:33 UTC ++++ groopm/groopm.py +@@ -52,14 +52,14 @@ __status__ = "Released" + import matplotlib as mpl + + # GroopM imports +-import mstore +-import cluster +-import refine +-import binManager +-import groopmUtils +-import groopmTimekeeper as gtime +-from groopmExceptions import ExtractModeNotAppropriateException +-from mstore import GMDataManager ++from . import mstore ++from . 
import cluster ++from . import refine ++from . import binManager ++from . import groopmUtils ++from . import groopmTimekeeper as gtime ++from .groopmExceptions import ExtractModeNotAppropriateException ++from .mstore import GMDataManager + + ############################################################################### + ############################################################################### +@@ -100,12 +100,12 @@ class GroopMOptionsParser(): + timer = gtime.TimeKeeper() + if(options.subparser_name == 'parse'): + # parse raw input +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in data parsing mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in data parsing mode..." % self.GMVersion) ++ print("*******************************************************************************") + # check this here: + if len(options.bamfiles) < 3: +- print "Sorry, You must supply at least 3 bamFiles to use GroopM. (You supplied %d)\n Exiting..." % len(options.bamfiles) ++ print("Sorry, You must supply at least 3 bamFiles to use GroopM. (You supplied %d)\n Exiting..." % len(options.bamfiles)) + return + GMdata = mstore.GMDataManager() + success = GMdata.createDB(options.bamfiles, +@@ -116,13 +116,13 @@ class GroopMOptionsParser(): + force=options.force, + threads=options.threads) + if not success: +- print options.dbname,"not updated" ++ print(options.dbname,"not updated") + + elif(options.subparser_name == 'core'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in core creation mode..." 
% self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in core creation mode..." % self.GMVersion) ++ print("*******************************************************************************") + CE = cluster.ClusterEngine(options.dbname, + timer, + force=options.force, +@@ -139,9 +139,9 @@ class GroopMOptionsParser(): + + elif(options.subparser_name == 'refine'): + # refine bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in core refining mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in core refining mode..." % self.GMVersion) ++ print("*******************************************************************************") + bids = [] + #if options.bids is not None: + # bids = options.bids +@@ -158,7 +158,7 @@ class GroopMOptionsParser(): + pfx="REFINED" + else: + pfx="" +- print "Refine bins" ++ print("Refine bins") + + RE.refineBins(timer, + auto=auto, +@@ -167,9 +167,9 @@ class GroopMOptionsParser(): + + elif(options.subparser_name == 'recruit'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in bin expansion mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in bin expansion mode..." 
% self.GMVersion) ++ print("*******************************************************************************") + RE = refine.RefineEngine(timer, + dbFileName=options.dbname, + getUnbinned=True, +@@ -183,9 +183,9 @@ class GroopMOptionsParser(): + + elif(options.subparser_name == 'extract'): + # Extract data +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in '%s' extraction mode..." % (self.GMVersion, options.mode) +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in '%s' extraction mode..." % (self.GMVersion, options.mode)) ++ print("*******************************************************************************") + bids = [] + if options.bids is not None: + bids = options.bids +@@ -220,35 +220,35 @@ class GroopMOptionsParser(): + raise ExtractModeNotAppropriateException("mode: "+ options.mode + " is unknown") + elif(options.subparser_name == 'merge'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in bin merging mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in bin merging mode..." 
% self.GMVersion) ++ print("*******************************************************************************") + BM = binManager.BinManager(dbFileName=options.dbname) + BM.loadBins(timer, makeBins=True, silent=False) + BM.merge(options.bids, options.force, saveBins=True) + + elif(options.subparser_name == 'split'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in bin splitting mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in bin splitting mode..." % self.GMVersion) ++ print("*******************************************************************************") + BM = binManager.BinManager(dbFileName=options.dbname) + BM.loadBins(timer, makeBins=True, silent=False) + BM.split(options.bid, options.parts, mode=options.mode, saveBins=True, auto=options.force) + + elif(options.subparser_name == 'delete'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in bin deleting mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in bin deleting mode..." 
% self.GMVersion) ++ print("*******************************************************************************") + BM = binManager.BinManager(dbFileName=options.dbname) + BM.loadBins(timer, makeBins=True, silent=True)#, bids=options.bids) + BM.deleteBins(options.bids, force=options.force, saveBins=True, freeBinnedRowIndices=True) + + elif(options.subparser_name == 'plot'): +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in bin plotting mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in bin plotting mode..." % self.GMVersion) ++ print("*******************************************************************************") + BM = binManager.BinManager(dbFileName=options.dbname) + + if options.bids is None: +@@ -266,9 +266,9 @@ class GroopMOptionsParser(): + + elif(options.subparser_name == 'explore'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in bin '%s' explorer mode..." % (self.GMVersion, options.mode) +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in bin '%s' explorer mode..." 
% (self.GMVersion, options.mode)) ++ print("*******************************************************************************") + transform=True^options.no_transform + bids = [] + if options.bids is not None: +@@ -297,13 +297,13 @@ class GroopMOptionsParser(): + elif (options.mode == 'sidebyside'): + BE.plotSideBySide(timer, coreCut=options.cutoff) + else: +- print "**Error: unknown mode:",options.mode ++ print("**Error: unknown mode:",options.mode) + + elif(options.subparser_name == 'flyover'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Making a flyover..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Making a flyover..." % self.GMVersion) ++ print("*******************************************************************************") + bids = [] + if options.bids is not None: + bids = options.bids +@@ -323,9 +323,9 @@ class GroopMOptionsParser(): + + elif(options.subparser_name == 'highlight'): + # make bin cores +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in highlighter mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in highlighter mode..." 
% self.GMVersion) ++ print("*******************************************************************************") + bids = [] + if options.bids is not None: + bids = options.bids +@@ -355,9 +355,9 @@ class GroopMOptionsParser(): + BM.printBins(options.format, fileName=options.outfile) + + elif(options.subparser_name == 'dump'): +- print "*******************************************************************************" +- print " [[GroopM %s]] Running in data dumping mode..." % self.GMVersion +- print "*******************************************************************************" ++ print("*******************************************************************************") ++ print(" [[GroopM %s]] Running in data dumping mode..." % self.GMVersion) ++ print("*******************************************************************************") + + # prep fields. Do this first cause users are mot likely to + # mess this part up! +@@ -365,8 +365,8 @@ class GroopMOptionsParser(): + fields = options.fields.split(',') + for field in fields: + if field not in allowable_fields: +- print "ERROR: field '%s' not recognised. Allowable fields are:" % field +- print '\t',",".join(allowable_fields) ++ print("ERROR: field '%s' not recognised. Allowable fields are:" % field) ++ print('\t',",".join(allowable_fields)) + return + if options.separator == '\\t': + separator = '\t' +--- groopm/groopmUtils.py.orig 2014-11-26 01:01:33 UTC ++++ groopm/groopmUtils.py +@@ -62,8 +62,8 @@ np.seterr(all='raise') + from scipy.spatial.distance import cdist, squareform + + # GroopM imports +-import binManager +-import mstore ++from . import binManager ++from . 
import mstore + + # other local imports + from bamm.bamExtractor import BamExtractor as BMBE +@@ -126,16 +126,16 @@ class GMExtractor: + import gzip + GM_open = gzip.open + except: +- print "Error when guessing contig file mimetype" ++ print("Error when guessing contig file mimetype") + raise + with GM_open(file_name, "r") as f: + contigs = CP.getWantedSeqs(f, self.PM.contigNames, storage=contigs) + except: +- print "Could not parse contig file:",fasta[0],sys.exc_info()[0] ++ print("Could not parse contig file:",fasta[0],sys.exc_info()[0]) + raise + + # now print out the sequences +- print "Writing files" ++ print("Writing files") + for bid in self.BM.getBids(): + if self.BM.PM.isLikelyChimeric[bid]: + file_name = os.path.join(self.outDir, "%s_bin_%d.chimeric.fna" % (self.prefix, bid)) +@@ -148,9 +148,9 @@ class GMExtractor: + if(cid in contigs): + f.write(">%s\n%s\n" % (cid, contigs[cid])) + else: +- print "These are not the contigs you're looking for. ( %s )" % (cid) ++ print("These are not the contigs you're looking for. 
( %s )" % (cid)) + except: +- print "Could not open file for writing:",file_name,sys.exc_info()[0] ++ print("Could not open file for writing:",file_name,sys.exc_info()[0]) + raise + + def extractReads(self, +@@ -177,7 +177,7 @@ class GMExtractor: + self.BM.loadBins(timer, makeBins=True,silent=False,bids=self.bids) + self.PM = self.BM.PM + +- print "Extracting reads" ++ print("Extracting reads") + + # work out a set of targets to pass to the parser + targets = [] +@@ -268,16 +268,16 @@ class BinExplorer: + transform = self.transform, + cutOff=coreCut) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting image" ++ print("Plotting image") + if self.bids == []: + self.bids = self.BM.getBids() + + if testing: + # ignore labelling files provided + self.binLabelsFile = "none" +- raw_input( "****************************************************************\n" ++ input( "****************************************************************\n" + " IMAGE MAKING INSTRUCTIONS - PLEASE READ CAREFULLY\n" + "****************************************************************\n" + " You are using GroopM in highlight mode. 
Congratulations!\n" +@@ -290,7 +290,7 @@ class BinExplorer: + " parameters to what you saw here, set bin labels, contig colours...\n\n" + " Good Luck!\n\n" + " Press return to continue...") +- print "****************************************************************" ++ print("****************************************************************") + + # bids as labels and randomise colours + self.LP = LabelParser(self.BM.getBids()) +@@ -457,9 +457,9 @@ class BinExplorer: + cutOff=coreCut, + getUnbinned=True,) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting flyover" ++ print("Plotting flyover") + + import itertools + all_bids = self.BM.getBids() +@@ -475,7 +475,7 @@ class BinExplorer: + elev_increment = total_elev_shift / total_frames + self.BM.setColorMap(self.cmString) + +- print "Need",total_frames,"frames:" ++ print("Need",total_frames,"frames:") + + """ + Handle taking out bins as "fade packets", assign indices to a list +@@ -510,17 +510,17 @@ class BinExplorer: + # make the fade schedule for the remaining bins + remaining_frames = float(total_frames - fade_schedules[0]) + num_fade_gs = float(len(fade_groups) - 1) +- fade_schedules += [len(i) for i in self.splitCeil(range(int(remaining_frames)), int(num_fade_gs))] ++ fade_schedules += [len(i) for i in self.splitCeil(list(range(int(remaining_frames))), int(num_fade_gs))] + + if False: +- print len(self.BM.getBids()), num_fade_gs +- print fade_groups +- print fade_schedules ++ print(len(self.BM.getBids()), num_fade_gs) ++ print(fade_groups) ++ print(fade_schedules) + + # plot all contigs first and then fade out + fig = plt.figure() + while len(fade_groups) >= 1: +- print "Rendering frame: %d of: %d" % (int(current_frame),int(total_frames)) ++ print("Rendering frame: %d of: %d" % (int(current_frame),int(total_frames))) + # get the next fade group and fade schedule + faders = fade_groups.pop(0) + fade_schedule = fade_schedules.pop(0) +@@ -555,16 
+555,16 @@ class BinExplorer: + bids=self.bids, + transform=self.transform) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting bin profiles" ++ print("Plotting bin profiles") + self.BM.setColorMap(self.cmString) + self.BM.plotProfileDistributions() + + def plotContigs(self, timer, coreCut, all=False): + """plot contigs""" + if all: +- print "Plotting all contigs" ++ print("Plotting all contigs") + self.PM.plotAll(timer, coreCut, transform=self.transform, ignoreContigLengths=self.ignoreContigLengths) + else: + self.BM.loadBins(timer, +@@ -574,9 +574,9 @@ class BinExplorer: + transform=self.transform, + cutOff=coreCut) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting binned contigs" ++ print("Plotting binned contigs") + self.BM.setColorMap(self.cmString) + if self.bids == []: + self.bids = self.BM.getBids() +@@ -592,9 +592,9 @@ class BinExplorer: + cutOff=coreCut, + transform=self.transform) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting bin assignments" ++ print("Plotting bin assignments") + if self.bids == []: + self.bids = self.BM.getBids() + +@@ -696,9 +696,9 @@ class BinExplorer: + transform=self.transform, + cutOff=coreCut) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting bin points" ++ print("Plotting bin points") + self.BM.setColorMap(self.cmString) + self.BM.plotBinPoints() + +@@ -726,9 +726,9 @@ class BinExplorer: + self.BM.setColorMap(self.cmString) + + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting side by side graphs" ++ print("Plotting side by side graphs") + (bin_centroid_points, bin_centroid_colors, bin_centroid_gc, bin_ids) = self.BM.findCoreCentres() + 
self.plotCoresVsContigs(bin_centroid_points, bin_centroid_colors) + +@@ -743,15 +743,15 @@ class BinExplorer: + bids=self.bids, + transform=self.transform) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting bin IDs" ++ print("Plotting bin IDs") + self.BM.setColorMap(self.cmString) + self.BM.plotBinIds() + + def plotUnbinned(self, timer, coreCut): + """Plot all contigs over a certain length which are unbinned""" +- print "Plotting unbinned contigs" ++ print("Plotting unbinned contigs") + self.PM.plotUnbinned(timer, coreCut, transform=self.transform, ignoreContigLengths=self.ignoreContigLengths) + + def plotSideBySide(self, timer, coreCut): +@@ -763,7 +763,7 @@ class BinExplorer: + transform=self.transform, + cutOff=coreCut) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: + self.BM.setColorMap(self.cmString) + self.BM.plotBins(sideBySide=True, +@@ -779,9 +779,9 @@ class BinExplorer: + transform=self.transform, + cutOff=coreCut) + if len(self.BM.bins) == 0: +- print "Sorry, no bins to plot" ++ print("Sorry, no bins to plot") + else: +- print "Plotting all bins together" ++ print("Plotting all bins together") + self.BM.setColorMap(self.cmString) + if self.bids == []: + p_bids = self.BM.getBids() +@@ -832,7 +832,7 @@ class BinExplorer: + plt.show() + plt.close(fig) + except: +- print "Error showing image", sys.exc_info()[0] ++ print("Error showing image", sys.exc_info()[0]) + raise + del fig + else: +@@ -859,7 +859,7 @@ class BinExplorer: + plt.savefig(f_name1,dpi=dpi,format=format) + plt.close(fig) + except: +- print "Error saving image",f_name1, sys.exc_info()[0] ++ print("Error saving image",f_name1, sys.exc_info()[0]) + raise + del fig + +@@ -893,7 +893,7 @@ class BinExplorer: + plt.savefig(f_name2,dpi=dpi,format=format) + plt.close(fig) + except: +- print "Error saving image",f_name1, sys.exc_info()[0] ++ print("Error saving 
image",f_name1, sys.exc_info()[0]) + raise + del fig + +@@ -1137,9 +1137,9 @@ class LabelParser: + try: + self.contig2Cols[name_2_row_index[cid]] = self.rgb(fields[1]) + except KeyError: +- print "ERROR: contig name %s not recognised" % cid ++ print("ERROR: contig name %s not recognised" % cid) + except: +- print "ERROR: parsing labels file: %s" % labelFileName ++ print("ERROR: parsing labels file: %s" % labelFileName) + raise + + # now we parse the rest of the contig names and colour the null colour +@@ -1178,7 +1178,7 @@ class LabelParser: + except IndexError: pass + self.loaded[bid] = True + except: +- print "ERROR parsing labels file: %s" % labelFileName ++ print("ERROR parsing labels file: %s" % labelFileName) + raise + + def setDefaultBinLabels(self, bids): +@@ -1192,7 +1192,7 @@ class LabelParser: + S = 1.0 + V = 1.0 + if setLoaded: +- for bid in self.bin2Str.keys(): ++ for bid in list(self.bin2Str.keys()): + self.loaded[bid] = True + num_bins = len(self.bin2Str) + offset = 0.5 +@@ -1201,7 +1201,7 @@ class LabelParser: + cols = [htr(H, S, V) for H in Hs] + np.random.shuffle(cols) + i = 0 +- for bid in self.bin2Str.keys(): ++ for bid in list(self.bin2Str.keys()): + if self.loaded[bid]: + # assign the color we picked + self.bin2Cols[bid] = cols[i] +--- groopm/mstore.py.orig 2015-03-06 04:42:41 UTC ++++ groopm/mstore.py +@@ -58,7 +58,7 @@ import numpy as np + from scipy.spatial.distance import cdist, squareform + + # GroopM imports +-from PCA import PCA, Center ++from .PCA import PCA, Center + + # BamM imports + try: +@@ -66,7 +66,7 @@ try: + from bamm.cWrapper import * + from bamm.bamFile import BM_coverageType as BMCT + except ImportError: +- print """ERROR: There was an error importing BamM. This probably means that ++ print("""ERROR: There was an error importing BamM. This probably means that + BamM is not installed properly or not in your PYTHONPATH. Installation + instructions for BamM are located at: + +@@ -79,7 +79,7 @@ you still encounter this error. 
Please lodge a bug rep + + Exiting... + -------------------------------------------------------------------------------- +-""" ++""") + import sys + sys.exit(-1) + +@@ -217,12 +217,12 @@ class GMDataManager: + if(not force): + user_option = self.promptOnOverwrite(dbFileName) + if(user_option != "Y"): +- print "Operation cancelled" ++ print("Operation cancelled") + return False + else: +- print "Overwriting database",dbFileName ++ print("Overwriting database",dbFileName) + except IOError as e: +- print "Creating new database", dbFileName ++ print("Creating new database", dbFileName) + + # create the db + try: +@@ -251,19 +251,19 @@ class GMDataManager: + import gzip + GM_open = gzip.open + except: +- print "Error when guessing contig file mimetype" ++ print("Error when guessing contig file mimetype") + raise + try: + with GM_open(contigsFile, "r") as f: + try: + (con_names, con_gcs, con_lengths, con_ksigs) = conParser.parse(f, cutoff, kse) + num_cons = len(con_names) +- cid_2_indices = dict(zip(con_names, range(num_cons))) ++ cid_2_indices = dict(list(zip(con_names, list(range(num_cons))))) + except: +- print "Error parsing contigs" ++ print("Error parsing contigs") + raise + except: +- print "Could not parse contig file:",contigsFile,exc_info()[0] ++ print("Could not parse contig file:",contigsFile,exc_info()[0]) + raise + + #------------------------ +@@ -280,15 +280,15 @@ class GMDataManager: + if len(bad_indices) > 0: + # report the bad contigs to the user + # and strip them before writing to the DB +- print "****************************************************************" +- print " IMPORTANT! - there are %d contigs with 0 coverage" % len(bad_indices) +- print " across all stoits. They will be ignored:" +- print "****************************************************************" +- for i in xrange(0, min(5, len(bad_indices))): +- print con_names[bad_indices[i]] ++ print("****************************************************************") ++ print(" IMPORTANT! 
- there are %d contigs with 0 coverage" % len(bad_indices)) ++ print(" across all stoits. They will be ignored:") ++ print("****************************************************************") ++ for i in range(0, min(5, len(bad_indices))): ++ print(con_names[bad_indices[i]]) + if len(bad_indices) > 5: +- print '(+ %d additional contigs)' % (len(bad_indices)-5) +- print "****************************************************************" ++ print('(+ %d additional contigs)' % (len(bad_indices)-5)) ++ print("****************************************************************") + + con_names = con_names[good_indices] + con_lengths = con_lengths[good_indices] +@@ -314,14 +314,14 @@ class GMDataManager: + expectedrows=num_cons + ) + except: +- print "Error creating KMERSIG table:", exc_info()[0] ++ print("Error creating KMERSIG table:", exc_info()[0]) + raise + + # compute the PCA of the ksigs and store these too + pc_ksigs, sumvariance = conParser.PCAKSigs(con_ksigs) + + db_desc = [] +- for i in xrange(0, len(pc_ksigs[0])): ++ for i in range(0, len(pc_ksigs[0])): + db_desc.append(('pc' + str(i+1), float)) + + try: +@@ -332,7 +332,7 @@ class GMDataManager: + expectedrows=num_cons + ) + except: +- print "Error creating KMERVALS table:", exc_info()[0] ++ print("Error creating KMERVALS table:", exc_info()[0]) + raise + + #------------------------ +@@ -378,7 +378,7 @@ class GMDataManager: + title="Bam based coverage", + expectedrows=num_cons) + except: +- print "Error creating coverage table:", exc_info()[0] ++ print("Error creating coverage table:", exc_info()[0]) + raise + + # transformed coverages +@@ -392,7 +392,7 @@ class GMDataManager: + title="Transformed coverage", + expectedrows=num_cons) + except: +- print "Error creating transformed coverage table:", exc_info()[0] ++ print("Error creating transformed coverage table:", exc_info()[0]) + raise + + # transformed coverage corners +@@ -406,7 +406,7 @@ class GMDataManager: + title="Transformed coverage corners", + 
expectedrows=len(stoitColNames)) + except: +- print "Error creating transformed coverage corner table:", exc_info()[0] ++ print("Error creating transformed coverage corner table:", exc_info()[0]) + raise + + # normalised coverages +@@ -418,16 +418,16 @@ class GMDataManager: + title="Normalised coverage", + expectedrows=num_cons) + except: +- print "Error creating normalised coverage table:", exc_info()[0] ++ print("Error creating normalised coverage table:", exc_info()[0]) + raise + + #------------------------ + # Add a table for the contigs + #------------------------ + self.setBinAssignments((h5file, meta_group), +- image=zip(con_names, ++ image=list(zip(con_names, + [0]*num_cons, +- con_lengths, con_gcs) ++ con_lengths, con_gcs)) + ) + + #------------------------ +@@ -435,7 +435,7 @@ class GMDataManager: + #------------------------ + self.initBinStats((h5file, meta_group)) + +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + + #------------------------ + # contig links +@@ -454,9 +454,9 @@ class GMDataManager: + title="ContigLinks", + expectedrows=len(rowwise_links)) + except: +- print "Error creating links table:", exc_info()[0] ++ print("Error creating links table:", exc_info()[0]) + raise +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + + #------------------------ + # Add metadata +@@ -475,12 +475,12 @@ class GMDataManager: + + # kmer signature variance table + pc_var = [sumvariance[0]] +- for i in xrange(1, len(sumvariance)): ++ for i in range(1, len(sumvariance)): + pc_var.append(sumvariance[i]-sumvariance[i-1]) + pc_var = tuple(pc_var) + + db_desc = [] +- for i in xrange(0, len(pc_var)): ++ for i in range(0, len(pc_var)): + db_desc.append(('pc' + str(i+1) + '_var', float)) + + try: +@@ -491,20 +491,20 @@ class GMDataManager: + expectedrows=1 + ) + except: +- print "Error creating tmp_kpca_variance table:", exc_info()[0] ++ print("Error creating tmp_kpca_variance table:", exc_info()[0]) + raise + + 
except: +- print "Error creating database:", dbFileName, exc_info()[0] ++ print("Error creating database:", dbFileName, exc_info()[0]) + raise + +- print "****************************************************************" +- print "Data loaded successfully!" +- print " ->",num_cons,"contigs" +- print " ->",len(stoitColNames),"BAM files" +- print "Written to: '"+dbFileName+"'" +- print "****************************************************************" +- print " %s" % timer.getTimeStamp() ++ print("****************************************************************") ++ print("Data loaded successfully!") ++ print(" ->",num_cons,"contigs") ++ print(" ->",len(stoitColNames),"BAM files") ++ print("Written to: '"+dbFileName+"'") ++ print("****************************************************************") ++ print(" %s" % timer.getTimeStamp()) + + # all good! + return True +@@ -516,17 +516,17 @@ class GMDataManager: + vrs = ",".join([str.lower(str(x)) for x in valid_responses]) + while(input_not_ok): + if(minimal): +- option = raw_input(" Overwrite? ("+vrs+") : ") ++ option = input(" Overwrite? ("+vrs+") : ") + else: + +- option = raw_input(" ****WARNING**** Database: '"+dbFileName+"' exists.\n" \ ++ option = input(" ****WARNING**** Database: '"+dbFileName+"' exists.\n" \ + " If you continue you *WILL* delete any previous analyses!\n" \ + " Overwrite? 
("+vrs+") : ") + if(option.upper() in valid_responses): +- print "****************************************************************" ++ print("****************************************************************") + return option.upper() + else: +- print "Error, unrecognised choice '"+option.upper()+"'" ++ print("Error, unrecognised choice '"+option.upper()+"'") + minimal = True + + #------------------------------------------------------------------------------ +@@ -538,7 +538,7 @@ class GMDataManager: + this_DB_version = self.getGMDBFormat(dbFileName) + if __current_GMDB_version__ == this_DB_version: + if not silent: +- print " GroopM DB version (%s) up to date" % this_DB_version ++ print(" GroopM DB version (%s) up to date" % this_DB_version) + return + + # now, if we get here then we need to do some work +@@ -558,14 +558,14 @@ class GMDataManager: + + def upgradeDB_0_to_1(self, dbFileName): + """Upgrade a GM db from version 0 to version 1""" +- print "*******************************************************************************\n" +- print " *** Upgrading GM DB from version 0 to version 1 ***" +- print "" +- print " please be patient..." 
+- print "" ++ print("*******************************************************************************\n") ++ print(" *** Upgrading GM DB from version 0 to version 1 ***") ++ print("") ++ print(" please be patient...") ++ print("") + # the change in this version is that we'll be saving the first + # two kmerSig PCA's in a separate table +- print " Calculating and storing the kmerSig PCAs" ++ print(" Calculating and storing the kmerSig PCAs") + + # compute the PCA of the ksigs + ksigs = self.getKmerSigs(dbFileName) +@@ -585,26 +585,26 @@ class GMDataManager: + expectedrows=num_cons + ) + except: +- print "Error creating KMERVALS table:", exc_info()[0] ++ print("Error creating KMERVALS table:", exc_info()[0]) + raise + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + # update the formatVersion field and we're done + self.setGMDBFormat(dbFileName, 1) +- print "*******************************************************************************" ++ print("*******************************************************************************") + + def upgradeDB_1_to_2(self, dbFileName): + """Upgrade a GM db from version 1 to version 2""" +- print "*******************************************************************************\n" +- print " *** Upgrading GM DB from version 1 to version 2 ***" +- print "" +- print " please be patient..." 
+- print "" ++ print("*******************************************************************************\n") ++ print(" *** Upgrading GM DB from version 1 to version 2 ***") ++ print("") ++ print(" please be patient...") ++ print("") + # the change in this version is that we'll be saving a variable number of kmerSig PCA's + # and GC information for each contig +- print " Calculating and storing the kmer signature PCAs" ++ print(" Calculating and storing the kmer signature PCAs") + + # grab any data needed from database before opening if for modification + bin_ids = self.getBins(dbFileName) +@@ -617,7 +617,7 @@ class GMDataManager: + num_cons = len(pc_ksigs) + + db_desc = [] +- for i in xrange(0, len(pc_ksigs[0])): ++ for i in range(0, len(pc_ksigs[0])): + db_desc.append(('pc' + str(i+1), float)) + + try: +@@ -639,11 +639,11 @@ class GMDataManager: + h5file.renameNode(pg, 'kpca', 'tmp_kpca', overwrite=True) + + except: +- print "Error creating kpca table:", exc_info()[0] ++ print("Error creating kpca table:", exc_info()[0]) + raise + + # Add GC +- contigFile = raw_input('\nPlease specify fasta file containing the bam reference sequences: ') ++ contigFile = input('\nPlease specify fasta file containing the bam reference sequences: ') + with open(contigFile, "r") as f: + try: + contigInfo = {} +@@ -657,7 +657,7 @@ class GMDataManager: + con_gcs = np.array([contigInfo[cid][1] for cid in con_names]) + con_lengths = np.array([contigInfo[cid][0] for cid in con_names]) + except: +- print "Error parsing contigs" ++ print("Error parsing contigs") + raise + + # remove any contigs not in the current DB (these were removed due to having zero coverage) +@@ -670,27 +670,27 @@ class GMDataManager: + + mg = h5file.getNode('/', name='meta') + self.setBinAssignments((h5file, mg), +- image=zip(con_names, ++ image=list(zip(con_names, + bin_ids, +- con_lengths, con_gcs) ++ con_lengths, con_gcs)) + ) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening 
DB:",dbFileName, exc_info()[0]) + raise + + # update the formatVersion field and we're done + self.setGMDBFormat(dbFileName, 2) +- print "*******************************************************************************" ++ print("*******************************************************************************") + + def upgradeDB_2_to_3(self, dbFileName): + """Upgrade a GM db from version 2 to version 3""" +- print "*******************************************************************************\n" +- print " *** Upgrading GM DB from version 2 to version 3 ***" +- print "" +- print " please be patient..." +- print "" ++ print("*******************************************************************************\n") ++ print(" *** Upgrading GM DB from version 2 to version 3 ***") ++ print("") ++ print(" please be patient...") ++ print("") + # the change in this version is that we'll be saving the variance for each kmerSig PCA +- print " Calculating and storing variance of kmer signature PCAs" ++ print(" Calculating and storing variance of kmer signature PCAs") + + # compute the PCA of the ksigs + conParser = ContigParser() +@@ -699,12 +699,12 @@ class GMDataManager: + + # calcualte variance of each PC + pc_var = [sumvariance[0]] +- for i in xrange(1, len(sumvariance)): ++ for i in range(1, len(sumvariance)): + pc_var.append(sumvariance[i]-sumvariance[i-1]) + pc_var = tuple(pc_var) + + db_desc = [] +- for i in xrange(0, len(pc_var)): ++ for i in range(0, len(pc_var)): + db_desc.append(('pc' + str(i+1) + '_var', float)) + + try: +@@ -726,26 +726,26 @@ class GMDataManager: + h5file.renameNode(meta, 'kpca_variance', 'tmp_kpca_variance', overwrite=True) + + except: +- print "Error creating kpca_variance table:", exc_info()[0] ++ print("Error creating kpca_variance table:", exc_info()[0]) + raise + except: +- print "Error opening DB:", dbFileName, exc_info()[0] ++ print("Error opening DB:", dbFileName, exc_info()[0]) + raise + + # update the formatVersion field and we're done + 
self.setGMDBFormat(dbFileName, 3) +- print "*******************************************************************************" ++ print("*******************************************************************************") + + def upgradeDB_3_to_4(self, dbFileName): + """Upgrade a GM db from version 3 to version 4""" +- print "*******************************************************************************\n" +- print " *** Upgrading GM DB from version 3 to version 4 ***" +- print "" +- print " please be patient..." +- print "" ++ print("*******************************************************************************\n") ++ print(" *** Upgrading GM DB from version 3 to version 4 ***") ++ print("") ++ print(" please be patient...") ++ print("") + # the change in this version is that we'll be saving the variance for each kmerSig PCA +- print " Adding chimeric flag for each bin." +- print " !!! Groopm core must be run again for this flag to be properly set. !!!" ++ print(" Adding chimeric flag for each bin.") ++ print(" !!! Groopm core must be run again for this flag to be properly set. 
!!!") + + # read existing data in 'bins' table + try: +@@ -755,7 +755,7 @@ class GMDataManager: + for row in all_rows: + ret_dict[row[0]] = row[1] + except: +- print "Error opening DB:", dbFileName, exc_info()[0] ++ print("Error opening DB:", dbFileName, exc_info()[0]) + raise + + # write new table with chimeric flag set to False by default +@@ -785,28 +785,28 @@ class GMDataManager: + title="Bin information", + expectedrows=1) + except: +- print "Error creating META table:", exc_info()[0] ++ print("Error creating META table:", exc_info()[0]) + raise + + h5file.renameNode(mg, 'bins', 'tmp_bins', overwrite=True) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + # update the formatVersion field and we're done + self.setGMDBFormat(dbFileName, 4) +- print "*******************************************************************************" ++ print("*******************************************************************************") + + def upgradeDB_4_to_5(self, dbFileName): + """Upgrade a GM db from version 4 to version 5""" +- print "*******************************************************************************\n" +- print " *** Upgrading GM DB from version 4 to version 5 ***" +- print "" +- print " please be patient..." 
+- print "" ++ print("*******************************************************************************\n") ++ print(" *** Upgrading GM DB from version 4 to version 5 ***") ++ print("") ++ print(" please be patient...") ++ print("") + # the change in this version is that we'll be saving the transformed coverage coords +- print " Saving transformed coverage profiles" +- print " You will not need to re-run parse or core due to this change" ++ print(" Saving transformed coverage profiles") ++ print(" You will not need to re-run parse or core due to this change") + + # we need to get the raw coverage profiles and the kmerPCA1 data + indices = self.getConditionalIndices(dbFileName, silent=False, checkUpgrade=False) +@@ -849,7 +849,7 @@ class GMDataManager: + title="Bam based coverage", + expectedrows=CT.numContigs) + except: +- print "Error creating coverage table:", exc_info()[0] ++ print("Error creating coverage table:", exc_info()[0]) + raise + + h5file.renameNode(profile_group, 'coverage', 'tmp_coverages', overwrite=True) +@@ -865,7 +865,7 @@ class GMDataManager: + title="Transformed coverage", + expectedrows=CT.numContigs) + except: +- print "Error creating transformed coverage table:", exc_info()[0] ++ print("Error creating transformed coverage table:", exc_info()[0]) + raise + + # transformed coverage corners +@@ -879,7 +879,7 @@ class GMDataManager: + title="Transformed coverage corners", + expectedrows=CT.numStoits) + except: +- print "Error creating transformed coverage corner table:", exc_info()[0] ++ print("Error creating transformed coverage corner table:", exc_info()[0]) + raise + + +@@ -892,7 +892,7 @@ class GMDataManager: + title="Normalised coverage", + expectedrows=CT.numContigs) + except: +- print "Error creating normalised coverage table:", exc_info()[0] ++ print("Error creating normalised coverage table:", exc_info()[0]) + raise + + # stoit col names may have been shuffled +@@ -912,7 +912,7 @@ class GMDataManager: + + # update the formatVersion field 
and we're done + self.setGMDBFormat(dbFileName, 5) +- print "*******************************************************************************" ++ print("*******************************************************************************") + + + #------------------------------------------------------------------------------ +@@ -925,7 +925,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='r') as h5file: + full_record = [list(x) for x in h5file.root.links.links.readWhere("contig1 >= 0")] + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + if indices == []: +@@ -958,7 +958,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='r') as h5file: + return np.array([x.nrow for x in h5file.root.meta.contigs.where(condition)]) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getCoverageProfiles(self, dbFileName, condition='', indices=np.array([])): +@@ -972,7 +972,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! + return np.array([list(h5file.root.profile.coverage[x.nrow]) for x in h5file.root.meta.contigs.where(condition)]) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getTransformedCoverageProfiles(self, dbFileName, condition='', indices=np.array([])): +@@ -986,7 +986,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! 
+ return np.array([list(h5file.root.profile.transCoverage[x.nrow]) for x in h5file.root.meta.contigs.where(condition)]) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getNormalisedCoverageProfiles(self, dbFileName, condition='', indices=np.array([])): +@@ -1000,12 +1000,12 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! + return np.array([list(h5file.root.profile.normCoverage[x.nrow]) for x in h5file.root.meta.contigs.where(condition)]) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def nukeBins(self, dbFileName): + """Reset all bin information, completely""" +- print " Clearing all old bin information from",dbFileName ++ print(" Clearing all old bin information from",dbFileName) + self.setBinStats(dbFileName, []) + self.setNumBins(dbFileName, 0) + self.setBinAssignments(dbFileName, updates={}, nuke=True) +@@ -1061,13 +1061,13 @@ class GMDataManager: + title="Bin information", + expectedrows=1) + except: +- print "Error creating META table:", exc_info()[0] ++ print("Error creating META table:", exc_info()[0]) + raise + + # rename the tmp table to overwrite + h5file.renameNode(mg, 'bins', 'tmp_bins', overwrite=True) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getBinStats(self, dbFileName): +@@ -1085,7 +1085,7 @@ class GMDataManager: + + return ret_dict + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + return {} + +@@ -1100,7 +1100,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! 
+ return np.array([list(x)[1] for x in h5file.root.meta.contigs.readWhere(condition)]).ravel() + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def setBinAssignments(self, storage, updates=None, image=None, nuke=False): +@@ -1135,17 +1135,17 @@ class GMDataManager: + bins = self.getBins(dbFileName) + + # now apply the updates +- for tr in updates.keys(): ++ for tr in list(updates.keys()): + bins[tr] = updates[tr] + + # and build the image +- image = np.array(zip(contig_names, bins, contig_lengths, contig_gcs), ++ image = np.array(list(zip(contig_names, bins, contig_lengths, contig_gcs)), + dtype=db_desc) + + try: + h5file = tables.openFile(dbFileName, mode='a') + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + meta_group = h5file.getNode('/', name='meta') + closeh5 = True +@@ -1157,7 +1157,7 @@ class GMDataManager: + image = np.array(image, + dtype=db_desc) + else: +- print "get with the program dude" ++ print("get with the program dude") + return + + # now we write the data +@@ -1174,7 +1174,7 @@ class GMDataManager: + title="Contig information", + expectedrows=num_cons) + except: +- print "Error creating CONTIG table:", exc_info()[0] ++ print("Error creating CONTIG table:", exc_info()[0]) + raise + + # rename the tmp table to overwrite +@@ -1193,7 +1193,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! + return np.array([list(x)[0] for x in h5file.root.meta.contigs.readWhere(condition)]).ravel() + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getContigLengths(self, dbFileName, condition='', indices=np.array([])): +@@ -1207,7 +1207,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! 
+ return np.array([list(x)[2] for x in h5file.root.meta.contigs.readWhere(condition)]).ravel() + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getContigGCs(self, dbFileName, condition='', indices=np.array([])): +@@ -1221,7 +1221,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! + return np.array([list(x)[3] for x in h5file.root.meta.contigs.readWhere(condition)]).ravel() + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getKmerSigs(self, dbFileName, condition='', indices=np.array([])): +@@ -1235,7 +1235,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! + return np.array([list(h5file.root.profile.kms[x.nrow]) for x in h5file.root.meta.contigs.where(condition)]) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getKmerPCAs(self, dbFileName, condition='', indices=np.array([])): +@@ -1249,7 +1249,7 @@ class GMDataManager: + condition = "cid != ''" # no condition breaks everything! 
+ return np.array([list(h5file.root.profile.kpca[x.nrow]) for x in h5file.root.meta.contigs.where(condition)]) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + #------------------------------------------------------------------------------ +@@ -1261,7 +1261,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='r') as h5file: + return np.array(list(h5file.root.meta.kpca_variance[0])) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getTransformedCoverageCorners(self, dbFileName): +@@ -1270,7 +1270,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='r') as h5file: + return np.array([list(x) for x in h5file.root.meta.transCoverageCorners.read()]) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def setMeta(self, h5file, metaData, overwrite=False): +@@ -1310,7 +1310,7 @@ class GMDataManager: + "Descriptive data", + expectedrows=1) + except: +- print "Error creating META table:", exc_info()[0] ++ print("Error creating META table:", exc_info()[0]) + raise + + if overwrite: +@@ -1324,7 +1324,7 @@ class GMDataManager: + # theres only one value + return h5file.root.meta.meta.read()[fieldName][0] + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def setGMDBFormat(self, dbFileName, version): +@@ -1344,7 +1344,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='a', rootUEP="/") as h5file: + self.setMeta(h5file, meta_data, overwrite=True) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getGMDBFormat(self, dbFileName): +@@ -1397,7 +1397,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='a', 
rootUEP="/") as h5file: + self.setMeta(h5file, meta_data, overwrite=True) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def getNumBins(self, dbFileName): +@@ -1417,7 +1417,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='r') as h5file: + return h5file.root.meta.meta.read()['clustered'] + except: +- print "Error opening database:", dbFileName, exc_info()[0] ++ print("Error opening database:", dbFileName, exc_info()[0]) + raise + + def setClustered(self, dbFileName, state): +@@ -1437,7 +1437,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='a', rootUEP="/") as h5file: + self.setMeta(h5file, meta_data, overwrite=True) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + def isComplete(self, dbFileName): +@@ -1446,7 +1446,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='r') as h5file: + return h5file.root.meta.meta.read()['complete'] + except: +- print "Error opening database:", dbFileName, exc_info()[0] ++ print("Error opening database:", dbFileName, exc_info()[0]) + raise + + def setComplete(self, dbFileName, state): +@@ -1466,7 +1466,7 @@ class GMDataManager: + with tables.openFile(dbFileName, mode='a', rootUEP="/") as h5file: + self.setMeta(h5file, meta_data, overwrite=True) + except: +- print "Error opening DB:",dbFileName, exc_info()[0] ++ print("Error opening DB:",dbFileName, exc_info()[0]) + raise + + #------------------------------------------------------------------------------ +@@ -1543,7 +1543,7 @@ class GMDataManager: + fh.write(separator+data_converters[j](data_arrays[j][i])) + fh.write('\n') + except: +- print "Error opening output file %s for writing" % outFile ++ print("Error opening output file %s for writing" % outFile) + raise + + ############################################################################### +@@ -1574,7 +1574,7 @@ 
class ContigParser: + + def parse(self, contigFile, cutoff, kse): + """Do the heavy lifting of parsing""" +- print "Parsing contigs" ++ print("Parsing contigs") + contigInfo = {} # save everything here first so we can sort accordingly + for cid,seq in self.readFasta(contigFile): + if len(seq) >= cutoff: +@@ -1619,7 +1619,7 @@ class ContigParser: + + def getWantedSeqs(self, contigFile, wanted, storage={}): + """Do the heavy lifting of parsing""" +- print "Parsing contigs" ++ print("Parsing contigs") + for cid,seq in self.readFasta(contigFile): + if(cid in wanted): + storage[cid] = seq +@@ -1696,7 +1696,7 @@ class KmerSigEngine: + returns a tuple of floats which is the kmer sig + """ + # tmp storage +- sig = dict(zip(self.kmerCols, [0.0] * self.numMers)) ++ sig = dict(list(zip(self.kmerCols, [0.0] * self.numMers))) + # the number fo kmers in this sequence + num_mers = len(seq)-self.kLen+1 + for i in range(0,num_mers): +@@ -1710,7 +1710,7 @@ class KmerSigEngine: + try: + return tuple([sig[x] / num_mers for x in self.kmerCols]) + except ZeroDivisionError: +- print "***WARNING*** Sequence '%s' is not playing well with the kmer signature engine " % seq ++ print("***WARNING*** Sequence '%s' is not playing well with the kmer signature engine " % seq) + return tuple([0.0] * self.numMers) + + ############################################################################### +@@ -1724,7 +1724,7 @@ class BamParser: + + def parse(self, bamFiles, contigNames, cid2Indices, threads): + """Parse multiple bam files and store the results in the main DB""" +- print "Parsing BAM files using %d threads" % threads ++ print("Parsing BAM files using %d threads" % threads) + + BP = BMBP(BMCT(CT.P_MEAN_TRIMMED, 5, 5)) + BP.parseBams(bamFiles, +@@ -1736,8 +1736,8 @@ class BamParser: + # we need to make sure that the ordering of contig names is consistent + # first we get a dict that connects a contig name to the index in + # the coverages array +- con_name_lookup = dict(zip(BP.BFI.contigNames, 
+- range(len(BP.BFI.contigNames)))) ++ con_name_lookup = dict(list(zip(BP.BFI.contigNames, ++ list(range(len(BP.BFI.contigNames)))))) + + # Next we build the cov_sigs array by appending the coverage + # profiles in the same order. We need to handle the case where +@@ -1799,7 +1799,7 @@ class CoverageTransformer: + self.kmerNormPC1 = kmerNormPC1 + self.covProfiles = coverageProfiles + self.stoitColNames = stoitColNames +- self.indices = range(self.numContigs) ++ self.indices = list(range(self.numContigs)) + self.scaleFactor = scaleFactor + + # things we care about! +@@ -1814,8 +1814,8 @@ class CoverageTransformer: + shrinkFn = lambda x:x + + if(not silent): +- print " Reticulating splines" +- print " Dimensionality reduction" ++ print(" Reticulating splines") ++ print(" Dimensionality reduction") + + unit_vectors = [(np.cos(i*2*np.pi/self.numStoits),np.sin(i*2*np.pi/self.numStoits)) for i in range(self.numStoits)] + +@@ -1954,13 +1954,13 @@ class CoverageTransformer: + # so we need to make sure that we get all the nodes in the ordering list + trier = 0 # start of a new disjoint ring + ordering = [trier] +- while len(ordering) < len(lr_dict.keys()): ++ while len(ordering) < len(list(lr_dict.keys())): + try: + adding_index = lr_dict[trier][0] # ok IF this guy has a registered neighbour + if adding_index in ordering: # NOT ok if the neighbour is already in the list + raise IndexError() + ordering.append(adding_index) +- while len(ordering) < len(lr_dict.keys()): # try consume the entire ring ++ while len(ordering) < len(list(lr_dict.keys())): # try consume the entire ring + # len(ordering) >= 2 + last = ordering[-1] + if lr_dict[last][0] == ordering[-2]: # bi-directionality means this will always work +@@ -1973,7 +1973,7 @@ class CoverageTransformer: + # stick (2 city system) + while(trier in ordering): # find the next index NOT in the ordering + trier += 1 +- if trier < len(lr_dict.keys()): # make sure it makes sense ++ if trier < len(list(lr_dict.keys())): # make 
sure it makes sense + ordering.append(trier) + break + else: +@@ -1985,14 +1985,14 @@ class CoverageTransformer: + # single point + while(trier in ordering): + trier += 1 +- if trier < len(lr_dict.keys()): # make sure it makes sense ++ if trier < len(list(lr_dict.keys())): # make sure it makes sense + ordering.append(trier) + + # sanity check + if len(ordering) != self.numStoits: +- print "WATTUP, ordering is looking wrong!" +- print ordering +- print lr_dict ++ print("WATTUP, ordering is looking wrong!") ++ print(ordering) ++ print(lr_dict) + + # reshuffle the contig order! + # yay for bubble sort! +--- groopm/profileManager.py.orig 2015-03-06 07:00:49 UTC ++++ groopm/profileManager.py +@@ -96,10 +96,10 @@ from scipy.spatial import KDTree as kdt + from scipy.stats import f_oneway, distributions + + # GroopM imports +-from PCA import PCA, Center +-from mstore import GMDataManager +-from bin import Bin, mungeCbar +-import groopmExceptions as ge ++from .PCA import PCA, Center ++from .mstore import GMDataManager ++from .bin import Bin, mungeCbar ++from . 
import groopmExceptions as ge + + np_seterr(all='raise') + +@@ -179,7 +179,7 @@ class ProfileManager: + if(silent): + verbose=False + if verbose: +- print "Loading data from:", self.dbFileName ++ print("Loading data from:", self.dbFileName) + + try: + self.numStoits = self.getNumStoits() +@@ -188,19 +188,19 @@ class ProfileManager: + condition=condition, + silent=silent) + if(verbose): +- print " Loaded indices with condition:", condition ++ print(" Loaded indices with condition:", condition) + self.numContigs = len(self.indices) + + if self.numContigs == 0: +- print " ERROR: No contigs loaded using condition:", condition ++ print(" ERROR: No contigs loaded using condition:", condition) + return + + if(not silent): +- print " Working with: %d contigs" % self.numContigs ++ print(" Working with: %d contigs" % self.numContigs) + + if(loadCovProfiles): + if(verbose): +- print " Loading coverage profiles" ++ print(" Loading coverage profiles") + self.covProfiles = self.dataManager.getCoverageProfiles(self.dbFileName, indices=self.indices) + self.normCoverages = self.dataManager.getNormalisedCoverageProfiles(self.dbFileName, indices=self.indices) + +@@ -209,14 +209,14 @@ class ProfileManager: + + if loadRawKmers: + if(verbose): +- print " Loading RAW kmer sigs" ++ print(" Loading RAW kmer sigs") + self.kmerSigs = self.dataManager.getKmerSigs(self.dbFileName, indices=self.indices) + + if(loadKmerPCs): + self.kmerPCs = self.dataManager.getKmerPCAs(self.dbFileName, indices=self.indices) + + if(verbose): +- print " Loading PCA kmer sigs (" + str(len(self.kmerPCs[0])) + " dimensional space)" ++ print(" Loading PCA kmer sigs (" + str(len(self.kmerPCs[0])) + " dimensional space)") + + self.kmerNormPC1 = np_copy(self.kmerPCs[:,0]) + self.kmerNormPC1 -= np_min(self.kmerNormPC1) +@@ -226,26 +226,26 @@ class ProfileManager: + self.kmerVarPC = self.dataManager.getKmerVarPC(self.dbFileName, indices=self.indices) + + if(verbose): +- print " Loading PCA kmer variance (total variance: 
%.2f" % np_sum(self.kmerVarPC) + ")" ++ print(" Loading PCA kmer variance (total variance: %.2f" % np_sum(self.kmerVarPC) + ")") + + if(loadContigNames): + if(verbose): +- print " Loading contig names" ++ print(" Loading contig names") + self.contigNames = self.dataManager.getContigNames(self.dbFileName, indices=self.indices) + + if(loadContigLengths): + self.contigLengths = self.dataManager.getContigLengths(self.dbFileName, indices=self.indices) + if(verbose): +- print " Loading contig lengths (Total: %d BP)" % ( sum(self.contigLengths) ) ++ print(" Loading contig lengths (Total: %d BP)" % ( sum(self.contigLengths) )) + + if(loadContigGCs): + self.contigGCs = self.dataManager.getContigGCs(self.dbFileName, indices=self.indices) + if(verbose): +- print " Loading contig GC ratios (Average GC: %0.3f)" % ( np_mean(self.contigGCs) ) ++ print(" Loading contig GC ratios (Average GC: %0.3f)" % ( np_mean(self.contigGCs) )) + + if(makeColors): + if(verbose): +- print " Creating color map" ++ print(" Creating color map") + + # use HSV to RGB to generate colors + S = 1 # SAT and VAL remain fixed at 1. 
Reduce to make +@@ -254,7 +254,7 @@ class ProfileManager: + + if(loadBins): + if(verbose): +- print " Loading bin assignments" ++ print(" Loading bin assignments") + + self.binIds = self.dataManager.getBins(self.dbFileName, indices=self.indices) + +@@ -289,7 +289,7 @@ class ProfileManager: + self.stoitColNames = self.getStoitColNames() + + except: +- print "Error loading DB:", self.dbFileName, exc_info()[0] ++ print("Error loading DB:", self.dbFileName, exc_info()[0]) + raise + + def reduceIndices(self, deadRowIndices): +@@ -419,7 +419,7 @@ class ProfileManager: + # we'd like to take it down to about 1500 or so RI's + # but we'd like to do this in a repeatable way + ideal_contig_num = 1500 +- sub_cons = range(len(self.indices)) ++ sub_cons = list(range(len(self.indices))) + while len(sub_cons) > ideal_contig_num: + # select every second contig when sorted by norm cov + cov_sorted = np_argsort(self.normCoverages[sub_cons]) +@@ -497,7 +497,7 @@ class ProfileManager: + def transformCP(self, timer, silent=False, nolog=False): + """Do the main transformation on the coverage profile data""" + if(not silent): +- print " Reticulating splines" ++ print(" Reticulating splines") + self.transformedCP = self.dataManager.getTransformedCoverageProfiles(self.dbFileName, indices=self.indices) + self.corners = self.dataManager.getTransformedCoverageCorners(self.dbFileName) + self.TCentre = np_mean(self.corners, axis=0) +@@ -530,7 +530,7 @@ class ProfileManager: + def createColorMapHSV(self): + S = 1.0 + V = 1.0 +- return LinearSegmentedColormap.from_list('GC', [htr((1.0 + np_sin(np_pi * (val/1000.0) - np_pi/2))/2., S, V) for val in xrange(0, 1000)], N=1000) ++ return LinearSegmentedColormap.from_list('GC', [htr((1.0 + np_sin(np_pi * (val/1000.0) - np_pi/2))/2., S, V) for val in range(0, 1000)], N=1000) + + def setColorMap(self, colorMapStr): + if colorMapStr == 'HSV': +@@ -617,7 +617,7 @@ class ProfileManager: + if self.numStoits == 3: + self.transformedCP = self.covProfiles + else: 
+- print "Number of stoits != 3. You need to transform" ++ print("Number of stoits != 3. You need to transform") + self.transformCP(timer) + + fig = plt.figure() +@@ -633,7 +633,7 @@ class ProfileManager: + plt.show() + plt.close(fig) + except: +- print "Error showing image", exc_info()[0] ++ print("Error showing image", exc_info()[0]) + raise + del fig + +@@ -646,7 +646,7 @@ class ProfileManager: + if self.numStoits == 3: + self.transformedCP = self.covProfiles + else: +- print "Number of stoits != 3. You need to transform" ++ print("Number of stoits != 3. You need to transform") + self.transformCP(timer) + + fig = plt.figure() +@@ -691,7 +691,7 @@ class ProfileManager: + plt.show() + plt.close(fig) + except: +- print "Error showing image", exc_info()[0] ++ print("Error showing image", exc_info()[0]) + raise + del fig + +@@ -801,7 +801,7 @@ class ProfileManager: + ax = fig.add_subplot(111, projection='3d') + if len(restrictedBids) == 0: + if highlight is None: +- print "BF:", np_shape(self.transformedCP) ++ print("BF:", np_shape(self.transformedCP)) + if ignoreContigLengths: + sc = ax.scatter(self.transformedCP[:,0], + self.transformedCP[:,1], +@@ -895,7 +895,7 @@ class ProfileManager: + marker='.') + sc.set_edgecolors = sc.set_facecolors = lambda *args:None # disable depth transparency effect + +- print np_shape(disp_vals), np_shape(hide_vals), np_shape(self.transformedCP) ++ print(np_shape(disp_vals), np_shape(hide_vals), np_shape(self.transformedCP)) + + # render color bar + cbar = plt.colorbar(sc, shrink=0.5) +@@ -914,7 +914,7 @@ class ProfileManager: + r_cols = np_append(r_cols, self.contigGCs[i]) + num_added += 1 + r_trans = np_reshape(r_trans, (num_added,3)) +- print np_shape(r_trans) ++ print(np_shape(r_trans)) + #r_cols = np_reshape(r_cols, (num_added,3)) + sc = ax.scatter(r_trans[:,0], + r_trans[:,1], +@@ -958,13 +958,13 @@ class ProfileManager: + fig.set_size_inches(primaryWidth,primaryWidth) + plt.savefig(fileName,dpi=dpi,format=format) + except: +- 
print "Error saving image",fileName, exc_info()[0] ++ print("Error saving image",fileName, exc_info()[0]) + raise + elif(show): + try: + plt.show() + except: +- print "Error showing image", exc_info()[0] ++ print("Error showing image", exc_info()[0]) + raise + if del_fig: + plt.close(fig) +@@ -1075,13 +1075,13 @@ class ProfileManager: + fig.set_size_inches(primaryWidth,primaryWidth) + plt.savefig(fileName,dpi=dpi,format=format) + except: +- print "Error saving image",fileName, exc_info()[0] ++ print("Error saving image",fileName, exc_info()[0]) + raise + else: + try: + plt.show() + except: +- print "Error showing image", exc_info()[0] ++ print("Error showing image", exc_info()[0]) + raise + + ############################################################################### +--- groopm/refine.py.orig 2014-11-26 01:01:33 UTC ++++ groopm/refine.py +@@ -87,11 +87,11 @@ from scipy.spatial import KDTree as kdt + from scipy.spatial.distance import cdist, squareform, pdist + + # GroopM imports +-from binManager import BinManager +-from ellipsoid import EllipsoidTool +-from PCA import PCA, Center +-import groopmExceptions as ge +-from som import SOM ++from .binManager import BinManager ++from .ellipsoid import EllipsoidTool ++from .PCA import PCA, Center ++from . 
import groopmExceptions as ge ++from .som import SOM + np_seterr(all='raise') + + ############################################################################### +@@ -150,23 +150,23 @@ class RefineEngine: + ignoreRanges=True + + if auto: +- print " Start automatic bin refinement" +- num_binned = len(self.PM.binnedRowIndices.keys()) ++ print(" Start automatic bin refinement") ++ num_binned = len(list(self.PM.binnedRowIndices.keys())) + perc = "%.2f" % round((float(num_binned)/float(self.PM.numContigs))*100,2) +- print " ",num_binned,"contigs across",len(self.BM.bins.keys()),"cores (",perc,"% )" ++ print(" ",num_binned,"contigs across",len(list(self.BM.bins.keys())),"cores (",perc,"% )") + + graph = self.autoRefineBins(timer, makeGraph=gf!="") + if graph is not None: +- print " Writing graph to:", gf ++ print(" Writing graph to:", gf) + try: + with open(gf, "w") as gv_fh: + gv_fh.write(graph) + except: +- print "Error writing graph to:", gf ++ print("Error writing graph to:", gf) + +- num_binned = len(self.PM.binnedRowIndices.keys()) ++ num_binned = len(list(self.PM.binnedRowIndices.keys())) + perc = "%.2f" % round((float(num_binned)/float(self.PM.numContigs))*100,2) +- print " ",num_binned,"contigs across",len(self.BM.bins.keys()),"cores (",perc,"% )" ++ print(" ",num_binned,"contigs across",len(list(self.BM.bins.keys())),"cores (",perc,"% )") + + if plotFinal != "": + bids = self.BM.getBids() +@@ -196,28 +196,28 @@ class RefineEngine: + user_option = self.promptOnPlotterRefine() + + if(user_option == 'Q'): +- print '\nBye!' ++ print('\nBye!') + return + + elif(user_option == 'C'): +- print "Select colormap:" +- print " 1. HSV" +- print " 2. Accent" +- print " 3. Blues" +- print " 4. Spectral" +- print " 5. Grayscale" +- print " 6. Discrete (14 colors)" +- print " 7. Discrete paired (14 colors)" ++ print("Select colormap:") ++ print(" 1. HSV") ++ print(" 2. Accent") ++ print(" 3. Blues") ++ print(" 4. Spectral") ++ print(" 5. Grayscale") ++ print(" 6. 
Discrete (14 colors)") ++ print(" 7. Discrete paired (14 colors)") + + bValid = False + while(not bValid): + try: +- colormap_id = int(raw_input(" Enter colormap number (e.g., 1): ")) ++ colormap_id = int(input(" Enter colormap number (e.g., 1): ")) + if colormap_id < 1 or colormap_id > 7: + raise ValueError('Invalid colormap id.') + bValid = True + except ValueError: +- print "Colormap must be specified as a number between 1 and 7." ++ print("Colormap must be specified as a number between 1 and 7.") + + if colormap_id == 1: + self.PM.setColorMap('HSV') +@@ -238,19 +238,19 @@ class RefineEngine: + if use_elipses: + ET = None + use_elipses = False +- print "\nEllipses off" ++ print("\nEllipses off") + else: + ET = self.ET + use_elipses = True +- print "\nEllipses on" ++ print("\nEllipses on") + + elif(user_option == 'X'): + if show_chimeric_bins: + show_chimeric_bins = False +- print "\nHiding likely chimeric bins." ++ print("\nHiding likely chimeric bins.") + else: + show_chimeric_bins = True +- print "\nShowing likely chimeric bins." ++ print("\nShowing likely chimeric bins.") + + elif(user_option == 'R'): + self.BM.plotBinIds(ignoreRanges=ignoreRanges, showChimeric=show_chimeric_bins) +@@ -277,7 +277,7 @@ class RefineEngine: + have_range = False + while(not have_range): + try: +- gc_range_str = raw_input(" Enter GC range to examine (e.g., 0.5-0.6): ") ++ gc_range_str = input(" Enter GC range to examine (e.g., 0.5-0.6): ") + + if '-' not in gc_range_str: + raise ValueError('Incorrectly formatted GC range.') +@@ -289,7 +289,7 @@ class RefineEngine: + + have_range = True + except ValueError: +- print "GC ranges must be entered as 'a-b' (e.g., 0.5-0.6)." 
++ print("GC ranges must be entered as 'a-b' (e.g., 0.5-0.6).") + self.BM.plotBinIds(gc_range=gc_range, ignoreRanges=ignoreRanges) + + elif(user_option == 'B'): +@@ -299,17 +299,17 @@ class RefineEngine: + while(not have_bid): + have_bid = True + try: +- usr_bids = raw_input(" Enter 'space' seperated bin id(s) to plot: ") ++ usr_bids = input(" Enter 'space' seperated bin id(s) to plot: ") + bids = [int(i) for i in usr_bids.split(" ")] + if bids == [-1]: + bids = self.BM.getBids() + else: + for bid in bids: + if bid not in self.BM.bins: +- print "ERROR: Bin %d not found!" % bid ++ print("ERROR: Bin %d not found!" % bid) + have_bid &= False + except ValueError: +- print "You need to enter an integer value!" ++ print("You need to enter an integer value!") + + if len(bids) > 0: + self.BM.plotSelectBins(bids, plotMers=True, ET=ET) +@@ -320,22 +320,22 @@ class RefineEngine: + have_parts = False + while(not have_bid): + try: +- bid = int(raw_input(" Enter bid to split: ")) ++ bid = int(input(" Enter bid to split: ")) + if bid not in self.BM.bins: +- print "ERROR: Bin %d not found!" % bid ++ print("ERROR: Bin %d not found!" % bid) + else: + have_bid = True + except ValueError: +- print "You need to enter an integer value!" ++ print("You need to enter an integer value!") + while(not have_parts): + try: +- parts = int(raw_input(" Enter number of parts to split into: ")) ++ parts = int(input(" Enter number of parts to split into: ")) + if(parts < 2): +- print "ERROR: Need to choose 2 or more parts" ++ print("ERROR: Need to choose 2 or more parts") + else: + have_parts = True + except ValueError: +- print "You need to enter an integer value!" 
++ print("You need to enter an integer value!") + self.BM.split(bid, + parts, + mode='kmer', +@@ -350,23 +350,23 @@ class RefineEngine: + have_radius = False + while(not have_bid): + try: +- bid = int(raw_input(" Enter bid of interest: ")) ++ bid = int(input(" Enter bid of interest: ")) + if bid not in self.BM.bins: +- print "ERROR: Bin %d not found!" % bid ++ print("ERROR: Bin %d not found!" % bid) + else: + have_bid = True + except ValueError: +- print "You need to enter an integer value!" ++ print("You need to enter an integer value!") + while(not have_radius): + try: +- usr_radius = raw_input(" Enter radius to select from [default 100]: ") ++ usr_radius = input(" Enter radius to select from [default 100]: ") + if usr_radius == "": + radius = 100 + else: + radius = int(usr_radius) + have_radius = True + except ValueError: +- print "You need to enter an integer value!" ++ print("You need to enter an integer value!") + + # we need to find all points in an area about the centroid of + # this bin +@@ -397,9 +397,9 @@ class RefineEngine: + # reshape + disp_vals = np_reshape(disp_vals, (num_points, 3)) + +- print " Points are located in bins:" ++ print(" Points are located in bins:") + for seen_bid in seen_bids: +- print " %d - %d occurances" % (seen_bid, len(seen_bids[seen_bid])) ++ print(" %d - %d occurances" % (seen_bid, len(seen_bids[seen_bid]))) + + fig = plt.figure() + ax = fig.add_subplot(1,1,1, projection='3d') +@@ -423,7 +423,7 @@ class RefineEngine: + try: + plt.show() + except: +- print "Error showing image:", sys.exc_info()[0] ++ print("Error showing image:", sys.exc_info()[0]) + raise + plt.close(fig) + del fig +@@ -459,7 +459,7 @@ class RefineEngine: + # identify and remove outlier bins + if markLikelyChimeric: + nuked = self.markLikelyChimericBins() +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + + if makeGraph: +@@ -472,7 +472,7 @@ class RefineEngine: + # merge bins together + if mergeSimilarBins: + 
self.mergeSimilarBins(graph=graph, verbose=False) +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + + if plotAfterOB: +@@ -485,12 +485,12 @@ class RefineEngine: + self.PM.contigGCs, + self.PM.contigLengths) + self.BM.plotBins(FNPrefix="AFTER_OB", ET=self.ET) +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + + if shuffleRefine: + nuked = self.shuffleRefineContigs(timer) +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + if makeGraph: + # Make sure we know these guys were deleted +@@ -501,7 +501,7 @@ class RefineEngine: + + if removeDuds: + nuked = self.removeDuds() +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + if makeGraph: + # Make sure we know these guys were deleted +@@ -516,7 +516,7 @@ class RefineEngine: + def markLikelyChimericBins(self, verbose=False): + """ Identify bins which contain mixed genomes based on GC. + Small bins are nuked, large bins are flagged as chimeric. """ +- print " Identifying possible chimeric bins" ++ print(" Identifying possible chimeric bins") + sys_stdout.flush() + + # first we need to build a distribution! 
+@@ -553,8 +553,8 @@ class RefineEngine: + freeBinnedRowIndices=True, + saveBins=False) + +- print " Identified %d likely chimeric bin(s), removed %d small chimeric bin(s)" % (num_chimeric_bins, len(dead_bins)) +- print " %s" % ",".join(str(u) for u in dead_bins) ++ print(" Identified %d likely chimeric bin(s), removed %d small chimeric bin(s)" % (num_chimeric_bins, len(dead_bins))) ++ print(" %s" % ",".join(str(u) for u in dead_bins)) + return dead_bins + + def mergeSimilarBins(self, verbose=False, graph=None, silent=False): +@@ -565,7 +565,7 @@ class RefineEngine: + orig_num_bins = len(self.BM.getNonChimericBinIds()) + + if not silent: +- print " Merging similar bins (%d) with kCut %0.2f (+/-%0.3f) cCut %0.2f (+/-%0.3f)" % (orig_num_bins, kCutMedian, kCutStd, cCutMedian, cCutStd) ++ print(" Merging similar bins (%d) with kCut %0.2f (+/-%0.3f) cCut %0.2f (+/-%0.3f)" % (orig_num_bins, kCutMedian, kCutStd, cCutMedian, cCutStd)) + + # identify merging groups and then merge them + mergers = self.findMergeGroups(kCutMedian, kCutStd, cCutMedian, cCutStd, verbose=verbose) +@@ -575,7 +575,7 @@ class RefineEngine: + bins_removed = self.combineMergers(merge, kCutMedian, kCutStd, cCutMedian, cCutStd, graph=graph) + num_bins_removed += len(bins_removed) + if not silent: +- print " Merged %d of %d cores leaving %d cores total" % (num_bins_removed, orig_num_bins, len(self.BM.getNonChimericBinIds())) ++ print(" Merged %d of %d cores leaving %d cores total" % (num_bins_removed, orig_num_bins, len(self.BM.getNonChimericBinIds()))) + + return num_bins_removed + +@@ -666,10 +666,10 @@ class RefineEngine: + common_neighbors = set(cov_neighbor_list).intersection(set(kmer_neighbor_list)) + + if verbose: +- print "++++++++++" +- print bid, cov_neighbor_list +- print bid, kmer_neighbor_list +- print bid, common_neighbors ++ print("++++++++++") ++ print(bid, cov_neighbor_list) ++ print(bid, kmer_neighbor_list) ++ print(bid, common_neighbors) + + # test each neighbor in turn + for i, 
neighbor_index in enumerate(common_neighbors): +@@ -680,8 +680,8 @@ class RefineEngine: + merged_query_bid = merged_bins[merged_query_bid] + + if verbose: +- print "++++++++++" +- print base_bid, query_bid, merged_base_bid, merged_query_bid ++ print("++++++++++") ++ print(base_bid, query_bid, merged_base_bid, merged_query_bid) + #----- + # TIME WASTERS + +@@ -689,7 +689,7 @@ class RefineEngine: + seen_key = self.BM.makeBidKey(base_bid, query_bid) + if(seen_key in processed_pairs or merged_base_bid == merged_query_bid): + if verbose: +- print "TW" ++ print("TW") + continue + processed_pairs[seen_key] = True + +@@ -708,7 +708,7 @@ class RefineEngine: + ) + if lengths_wrong: + if verbose: +- print "LW" ++ print("LW") + continue + + #----- +@@ -719,15 +719,15 @@ class RefineEngine: + c_dist_bw = self.cDistBetweenBins(base_bin, query_bin) + + if verbose: +- print 'k_dist_bw, c_dist_bw' +- print k_dist_bw, c_dist_bw +- print '---------------------' ++ print('k_dist_bw, c_dist_bw') ++ print(k_dist_bw, c_dist_bw) ++ print('---------------------') + + + if k_dist_bw < kCutMedian and c_dist_bw < cCutMedian: + if verbose: +- print 'MERGED' +- print '---------------------' ++ print('MERGED') ++ print('---------------------') + + if merged_query_bid < merged_base_bid: + merged_bins[merged_base_bid] = merged_query_bid +@@ -773,7 +773,7 @@ class RefineEngine: + + if not INTT: + if verbose: +- print "KINTT" ++ print("KINTT") + continue + #----- + # MINIMUM BOUNDING COVERAGE ELLIPSOID +@@ -802,7 +802,7 @@ class RefineEngine: + + if not intersects: + if verbose: +- print "CINTT" ++ print("CINTT") + continue + + # We only get here if we're going to merge the bins +@@ -869,7 +869,7 @@ class RefineEngine: + cur_bin = self.BM.getBin(cur_bid) + + dists = [] +- for i in xrange(1, len(sorted_bid)): ++ for i in range(1, len(sorted_bid)): + frag_bid = sorted_bid[i] + frag_bin = self.BM.getBin(frag_bid) + +@@ -1069,14 +1069,14 @@ class RefineEngine: + iterations=800, + silent=silent, + 
weightImgFileNamePrefix=animateFilePrefix) +- print " --" +- print " %s" % timer.getTimeStamp() ++ print(" --") ++ print(" %s" % timer.getTimeStamp()) + if render: + SS.renderWeights("S1") + + if maskBoundaries: + if not silent: +- print " Creating boundary mask" ++ print(" Creating boundary mask") + # make a boundary mask + if render: + SS.makeBoundaryMask(plotMaskFile="S2.png") +@@ -1086,23 +1086,23 @@ class RefineEngine: + if defineBins: + # assign regions on som surface to specific bins + if not silent: +- print " Defining bin regions" ++ print(" Defining bin regions") + SS.defineBinRegions(bids, training_data, render=render) + if render: + SS.renderBoundaryMask("S5.png") + if maskBoundaries: + # mask out regions where we don't like it + if not silent: +- print " Masking SOM classifier" ++ print(" Masking SOM classifier") + SS.maskBoundaries(addNoise=False, doFlat=True) + if render: + SS.renderWeights("S6") + +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + if retrain: + # retrain bin regions using contigs from the bin + if not silent: +- print " Retraining SOM classifier" ++ print(" Retraining SOM classifier") + for i in range(len(bids)): + bid = bids[i] + sys_stdout.write("\r Retraining on bin: %d (%d of %d)" % (bid, i+1, len(bids))) +@@ -1117,7 +1117,7 @@ class RefineEngine: + render=render) + if render: + SS.renderWeights("gg") +- print " --" ++ print(" --") + + if render: + SS.renderWeights("S7") +@@ -1148,8 +1148,8 @@ class RefineEngine: + + # now we'd like to centre the weights and mask within an + # appropriately sized square +- min_p = np_min(maskPoints.keys(), axis=0) +- max_p = np_max(maskPoints.keys(), axis=0) ++ min_p = np_min(list(maskPoints.keys()), axis=0) ++ max_p = np_max(list(maskPoints.keys()), axis=0) + diffs = max_p - min_p + small_side = np_min(diffs) + sweights = np_copy(SS.weights.nodes[min_p[0]:min_p[0]+diffs[0]+1,min_p[1]:min_p[1]+diffs[1]+1]) +@@ -1158,7 +1158,7 @@ class RefineEngine: + # shift and mask 
out all other bins + shifted_mask_points = {} + shifted_bin_mask = np_ones((diffs[0]+1,diffs[1]+1)) +- for (r,c) in maskPoints.keys(): ++ for (r,c) in list(maskPoints.keys()): + shift = maskPoints[(r,c)] - min_p + shifted_bin_mask[shift[0],shift[1]] = 0 + shifted_mask_points[(shift[0], shift[1])] = shift +@@ -1174,7 +1174,7 @@ class RefineEngine: + + #SS.weights.renderSurface("D_%d.png"%bid, nodes=sweights) + # update the torusMesh values appropriately +- for (r,c) in maskPoints.keys(): ++ for (r,c) in list(maskPoints.keys()): + shift = maskPoints[(r,c)] - min_p + SS.weights.nodes[r,c] = sweights[shift[0], shift[1]] + SS.weights.fixFlatNodes() +@@ -1185,7 +1185,7 @@ class RefineEngine: + + def shuffleRefineContigs(self, timer, inclusivity=2): + """refine bins by shuffling contigs around""" +- print " Start shuffle refinement" ++ print(" Start shuffle refinement") + + # first, build a SOM + bids = self.BM.getBids() +@@ -1198,7 +1198,7 @@ class RefineEngine: + defineBins=True, + retrain=True) + +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + + # now do the shuffle refinement, keep an eye out for + new_assignments = {} +@@ -1280,30 +1280,30 @@ class RefineEngine: + nones[old_bid] = 1 + + +- if False: +- print " ------------------------------------------------------" +- print " BID ORIG CHGE SAME NEWS NONE TOTAL" +- print " ------------------------------------------------------" ++ if False: ++ print(" ------------------------------------------------------") ++ print(" BID ORIG CHGE SAME NEWS NONE TOTAL") ++ print(" ------------------------------------------------------") + for bid in bids: +- print " %4d %5d " % (bid, self.BM.bins[bid].binSize), ++ print(" %4d %5d " % (bid, self.BM.bins[bid].binSize), end=' ') + if bid in wrongs: +- print "%04d " % wrongs[bid], ++ print("%04d " % wrongs[bid], end=' ') + else: +- print "0000 ", ++ print("0000 ", end=' ') + if bid in rights: +- print "%04d " % rights[bid], ++ print("%04d " % rights[bid], 
end=' ') + else: +- print "0000 ", ++ print("0000 ", end=' ') + if bid in news: +- print "%04d " % news[bid], ++ print("%04d " % news[bid], end=' ') + else: +- print "0000 ", ++ print("0000 ", end=' ') + if bid in nones: +- print "%04d " % nones[bid], ++ print("%04d " % nones[bid], end=' ') + else: +- print "0000 ", +- print "%04d " % len(new_assignments[bid]) +- print "\n ---------------------------------------------" ++ print("0000 ", end=' ') ++ print("%04d " % len(new_assignments[bid])) ++ print("\n ---------------------------------------------") + + # now get ready for saving. + # first, we nuke all non-chimeric bins +@@ -1335,7 +1335,7 @@ class RefineEngine: + + def removeDuds(self, ms=20, mv=1000000, verbose=False): + """Run this after refining to remove scrappy leftovers""" +- print " Removing dud cores (min %d contigs or %d bp)" % (ms, mv) ++ print(" Removing dud cores (min %d contigs or %d bp)" % (ms, mv)) + deleters = [] + for bid in self.BM.getBids(): + self.BM.bins[bid] +@@ -1343,13 +1343,13 @@ class RefineEngine: + # delete this chap! 
+ deleters.append(bid) + if verbose: +- print "duds", deleters ++ print("duds", deleters) + if len(deleters) > 0: + self.BM.deleteBins(deleters, + force=True, + freeBinnedRowIndices=True, + saveBins=False) +- print " Removed %d cores leaving %d cores" % (len(deleters), len(self.BM.bins)) ++ print(" Removed %d cores leaving %d cores" % (len(deleters), len(self.BM.bins))) + return deleters + + #------------------------------------------------------------------------------ +@@ -1509,7 +1509,7 @@ class RefineEngine: + step_size = float(len(row_indices)) / sample_size + si = [] + index = 0.0 +- for _i in xrange(0, sample_size): ++ for _i in range(0, sample_size): + si.append(row_indices[sorted_indices[int(index)]]) + index += step_size + +@@ -1568,10 +1568,10 @@ class RefineEngine: + indices2 = bin2.rowIndices + + angles = [] +- for i in xrange(0, min(len(bin1.rowIndices), max_in_bin)): ++ for i in range(0, min(len(bin1.rowIndices), max_in_bin)): + r1 = indices1[i] + +- for j in xrange(0, min(len(bin2.rowIndices), max_in_bin)): ++ for j in range(0, min(len(bin2.rowIndices), max_in_bin)): + r2 = indices2[j] + try: + ang = np_arccos(np_dot(self.PM.covProfiles[r1], self.PM.covProfiles[r2]) / +@@ -2051,7 +2051,7 @@ class RefineEngine: + + def recruitWrapper(self, timer, inclusivity=2, step=200, nukeAll=False, saveBins=False): + """Recuit more contigs to the bins""" +- print "Recruiting unbinned contigs" ++ print("Recruiting unbinned contigs") + + # make a list of all the cov and kmer vals + total_expanded = 0 +@@ -2090,9 +2090,9 @@ class RefineEngine: + + # talk to the user + perc_binned = float(total_binned)/float(total_contigs) +- print " Planned steps = ", steps +- print " BEGIN: %0.4f" % perc_binned +"%"+" of %d requested contigs in bins" % total_contigs +- print " %d contigs unbinned" % total_unbinned ++ print(" Planned steps = ", steps) ++ print(" BEGIN: %0.4f" % perc_binned +"%"+" of %d requested contigs in bins" % total_contigs) ++ print(" %d contigs unbinned" % 
total_unbinned) + + # build the classifier on all the existing bins + (SS, minz, maxz, side) = self.buildSOM(timer, +@@ -2100,7 +2100,7 @@ class RefineEngine: + defineBins=True, + retrain=True) + +- print " %s" % timer.getTimeStamp() ++ print(" %s" % timer.getTimeStamp()) + + # go through the steps we decided on + affected_bids = list(np_copy(self.BM.getBids())) +@@ -2132,7 +2132,7 @@ class RefineEngine: + block -= minz + block /= maxz + +- print " Recruiting contigs above: %d (%d contigs)" % (cutoff, len(unbinned_rows)) ++ print(" Recruiting contigs above: %d (%d contigs)" % (cutoff, len(unbinned_rows))) + + for i in range(len(unbinned_rows)): + putative_bid = SS.classifyContig(block[i]) +@@ -2159,21 +2159,21 @@ class RefineEngine: + for row_index in new_binned: + del unbinned[row_index] + +- print " Recruited: %d contigs" % this_step_binned +- print " %s" % timer.getTimeStamp() ++ print(" Recruited: %d contigs" % this_step_binned) ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + + # talk to the user + perc_recruited = float(total_expanded)/float(total_unbinned) + perc_binned = float(total_binned)/float(total_contigs) +- print " Recruited %0.4f" % perc_recruited +"%"+" of %d unbinned contigs" % total_unbinned +- print " END: %0.4f" % perc_binned +"%"+" of %d requested contigs in bins" % total_contigs +- print " %s" % timer.getTimeStamp() ++ print(" Recruited %0.4f" % perc_recruited +"%"+" of %d unbinned contigs" % total_unbinned) ++ print(" END: %0.4f" % perc_binned +"%"+" of %d requested contigs in bins" % total_contigs) ++ print(" %s" % timer.getTimeStamp()) + sys_stdout.flush() + + # now save + if(saveBins): +- print "Saving bins" ++ print("Saving bins") + self.BM.saveBins() + + #------------------------------------------------------------------------------ +@@ -2183,7 +2183,7 @@ class RefineEngine: + """Output a valid graphviz dot file""" + op = "digraph refine {\n" + # render nodes +- for bid in graph[0].keys(): ++ for bid in 
list(graph[0].keys()): + op += graph[0][bid] + # render edges + op += "\n".join(graph[1]) +@@ -2191,7 +2191,7 @@ class RefineEngine: + return op + + def printRefinePlotterInstructions(self): +- raw_input( "****************************************************************\n" ++ input( "****************************************************************\n" + " REFINING INSTRUCTIONS - PLEASE READ CAREFULLY\n"+ + "****************************************************************\n" + " You have chosen to refine in plotter mode. Congratulations!\n" +@@ -2201,7 +2201,7 @@ class RefineEngine: + " Follow the instructions to merge or split these bins\n\n" + " Good Luck!\n\n" + " Press return to continue...") +- print "****************************************************************" ++ print("****************************************************************") + + def promptOnPlotterRefine(self, minimal=False): + """Find out what the user wishes to do next when refining bins""" +@@ -2210,9 +2210,9 @@ class RefineEngine: + vrs = ",".join([str.lower(str(x)) for x in valid_responses]) + while(input_not_ok): + if(minimal): +- option = raw_input(" What next? ("+vrs+") : ") ++ option = input(" What next? 
("+vrs+") : ") + else: +- option = raw_input("\n Please choose from the following options:\n" \ ++ option = input("\n Please choose from the following options:\n" \ + "------------------------------------------------------------\n" \ + " r = plot entire space using bin ids\n" \ + " p = plot entire space with bins as points\n" \ +@@ -2231,7 +2231,7 @@ class RefineEngine: + if(option.upper() in valid_responses): + return option.upper() + else: +- print "Error, unrecognised choice '"+option+"'" ++ print("Error, unrecognised choice '"+option+"'") + minimal=True + + def PCA2Col(self, PCAs): +@@ -2409,7 +2409,7 @@ class GrubbsTester: + idx = 999 + + if verbose: +- print np_mean(compVals+[maxVal]), np_std(compVals+[maxVal], ddof=1), maxVal, v, idx, self.critVs[idx], v > self.critVs[idx] ++ print(np_mean(compVals+[maxVal]), np_std(compVals+[maxVal], ddof=1), maxVal, v, idx, self.critVs[idx], v > self.critVs[idx]) + + return v > self.critVs[idx] + +--- groopm/som.py.orig 2014-11-26 01:01:33 UTC ++++ groopm/som.py +@@ -1,5 +1,5 @@ + #!/usr/bin/env python +-from __future__ import division ++ + ############################################################################### + # # + # som.py # +@@ -77,12 +77,13 @@ from math import log, exp + import numpy as np + from scipy.spatial.distance import cdist + from PIL import Image, ImageDraw ++from functools import reduce + np.seterr(all='raise') + + # GroopM imports +-from torusMesh import TorusMesh as TM +-from rainbow import Rainbow +-import groopmExceptions as ge ++from .torusMesh import TorusMesh as TM ++from .rainbow import Rainbow ++from . 
import groopmExceptions as ge + + ############################################################################### + ############################################################################### +@@ -193,7 +194,7 @@ class SOM: + # we only need to return a tuple + nt = self.makeNTuple(s_bid,q_bid) + neighbours[nt] = True +- return neighbours.keys() ++ return list(neighbours.keys()) + + def makeNTuple(self, bid1, bid2): + """A way for making standard tuples from bids""" +@@ -228,7 +229,7 @@ class SOM: + """ + + if not silent: +- print " Start SOM training. Side: %d Max: %d iterations" % (self.side, iterations) ++ print(" Start SOM training. Side: %d Max: %d iterations" % (self.side, iterations)) + + if radius == 0.0: + radius = self.radius +@@ -376,7 +377,7 @@ class SOM: + weights = np.clip(weights + deltasheet[rows:2*rows,cols:2*cols], 0, 1) + else: + delta_fold = deltasheet[rows:2*rows,cols:2*cols] +- for (r,c) in mask.keys(): ++ for (r,c) in list(mask.keys()): + weights[r,c] = np.clip(weights[r,c] + delta_fold[r,c], 0, 1) + flat_nodes = weights.reshape((rows*cols, self.dimension)) + +@@ -386,7 +387,7 @@ class SOM: + # make a tmp image, perhaps + if(weightImgFileNamePrefix != ""): + filename = "%s_%04d.jpg" % (weightImgFileNamePrefix, i) +- print " writing: %s" % filename ++ print(" writing: %s" % filename) + self.weights.renderSurface(filename) + + return weights +@@ -467,7 +468,7 @@ class SOM: + # get all the points within this region + points = self.floodFill(startR, startC, self.boundaryMask) + collision_bid = 0 +- for (r,c) in points.keys(): ++ for (r,c) in list(points.keys()): + if self.binAssignments[r,c] != 0: + if self.binAssignments[r,c] != bid: + # we have already assigned this point to a bin +@@ -487,7 +488,7 @@ class SOM: + # rebuild the mask with a new cutoff + mc = mc/2 + mask = np.copy(self.boundaryMask) +- for (r,c) in points.keys(): ++ for (r,c) in list(points.keys()): + if self.VS_flat[r,c] > mc: + mask[r,c] = 1. 
+ else: +@@ -497,15 +498,15 @@ class SOM: + new_points = self.floodFill(startR, startC, mask) + #print len(collision_points.keys()), len(new_points.keys()) + #print collision_points.keys()[0] in new_points +- if len(collision_points.keys()) == 0 or len(new_points.keys()) == 0: ++ if len(list(collision_points.keys())) == 0 or len(list(new_points.keys())) == 0: + continue + # there should be no overlap +- if collision_points.keys()[0] not in new_points: ++ if list(collision_points.keys())[0] not in new_points: + # we have resolved the issue + resolved = True + # now we need to fix the binAssignments and boundary mask + self.boundaryMask = mask +- for (r,c) in points.keys(): ++ for (r,c) in list(points.keys()): + if (r,c) in new_points: + # assign this point to the new bid + self.binAssignments[r,c] = bid +@@ -517,7 +518,7 @@ class SOM: + break + + if not resolved: +- print "Cannot repair map, bin %d may be incorrectly merged with bin %d" % (bid, collision_bid) ++ print("Cannot repair map, bin %d may be incorrectly merged with bin %d" % (bid, collision_bid)) + return + + def makeBinMask(self, profile, fileName="", dim=False): +@@ -526,7 +527,7 @@ class SOM: + points = self.floodFill(r, c, self.boundaryMask) + if fileName != "": + ret_mask = np.ones_like(self.boundaryMask) +- for (r,c) in points.keys(): ++ for (r,c) in list(points.keys()): + ret_mask[r,c] = 0 + self.renderBoundaryMask(fileName, mask=ret_mask) + +@@ -600,7 +601,7 @@ class SOM: + img = img.resize((self.weights.columns*10, self.weights.rows*10),Image.NEAREST) + img.save(filename) + except: +- print sys.exc_info()[0] ++ print(sys.exc_info()[0]) + raise + + def renderBoundaryMask(self, fileName, mask=None, colMap=None): +@@ -625,7 +626,7 @@ class SOM: + img = img.resize((self.side*10, self.side*10),Image.NEAREST) + img.save(fileName) + except: +- print sys.exc_info()[0] ++ print(sys.exc_info()[0]) + raise + + def transColour(self, val): +@@ -659,7 +660,7 @@ class SOM: + img = 
img.resize((self.weights.columns*10, self.weights.rows*10),Image.NEAREST) + img.save(fileName) + except: +- print sys.exc_info()[0] ++ print(sys.exc_info()[0]) + raise + + ############################################################################### +--- groopm/torusMesh.py.orig 2014-11-26 01:01:33 UTC ++++ groopm/torusMesh.py +@@ -250,7 +250,7 @@ class TorusMesh: + img = img.resize((columns*10, rows*10),Image.NEAREST) + img.save(fileName) + except: +- print sys.exc_info()[0] ++ print(sys.exc_info()[0]) + raise + + ############################################################################### diff --git a/biology/libnuml/Makefile b/biology/libnuml/Makefile index 890a2e919ee..617d791e588 100644 --- a/biology/libnuml/Makefile +++ b/biology/libnuml/Makefile @@ -1,6 +1,7 @@ PORTNAME= libnuml DISTVERSIONPREFIX= v DISTVERSION= 1.1.4 +PORTREVISION= 1 CATEGORIES= biology devel MAINTAINER= yuri@FreeBSD.org diff --git a/biology/libsbml/Makefile b/biology/libsbml/Makefile index a682cbdb522..2110f2b20a9 100644 --- a/biology/libsbml/Makefile +++ b/biology/libsbml/Makefile @@ -2,6 +2,7 @@ PORTNAME= libsbml PORTVERSION= 5.19.0 +PORTREVISION= 1 CATEGORIES= biology devel MASTER_SITES= SF/sbml/libsbml/${PORTVERSION}/stable DISTNAME= libSBML-${PORTVERSION}-core-src diff --git a/biology/libsedml/Makefile b/biology/libsedml/Makefile index 556abc67860..c4989360265 100644 --- a/biology/libsedml/Makefile +++ b/biology/libsedml/Makefile @@ -1,6 +1,7 @@ PORTNAME= libsedml DISTVERSIONPREFIX= v DISTVERSION= 2.0.28 +PORTREVISION= 1 CATEGORIES= biology devel MAINTAINER= yuri@FreeBSD.org diff --git a/biology/ncbi-vdb/Makefile b/biology/ncbi-vdb/Makefile index 46cec897971..9bfd9e9dd7e 100644 --- a/biology/ncbi-vdb/Makefile +++ b/biology/ncbi-vdb/Makefile @@ -1,6 +1,6 @@ PORTNAME= ncbi-vdb DISTVERSION= 2.11.0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= biology MAINTAINER= mzaki@niid.go.jp diff --git a/biology/py-crossmap/files/patch-2to3 b/biology/py-crossmap/files/patch-2to3 new file mode 
100644 index 00000000000..c3b680224b4 --- /dev/null +++ b/biology/py-crossmap/files/patch-2to3 @@ -0,0 +1,2955 @@ +--- lib/cmmodule/SAM.py.orig 2018-12-17 16:05:26 UTC ++++ lib/cmmodule/SAM.py +@@ -150,52 +150,52 @@ class ParseSAM: + forward_SE +=1 + + if paired: +- print >>sys.stderr,"\n#==================================================" +- print >>sys.stderr,"#================Report (pair-end)=================" +- print >>sys.stderr, "%-25s%d" % ("Total Reads:",total_read) +- print >>sys.stderr, "%-25s%d" % ("Total Mapped Reads:", (mapped_read1 + mapped_read2)) +- print >>sys.stderr, "%-25s%d" % ("Total Unmapped Reads:",(unmapped_read1 + unmapped_read2)) +- print >>sys.stderr, "%-25s%d" % ("PCR duplicate:",pcr_duplicate) +- print >>sys.stderr, "%-25s%d" % ("QC-failed:",low_qual) +- print >>sys.stderr, "%-25s%d" % ("Not primary mapping:",secondary_hit) +- print >>sys.stderr, "\n", +- print >>sys.stderr, "%-25s%d" % ("Unmapped Read-1:",unmapped_read1) +- print >>sys.stderr, "%-25s%d" % ("Mapped Read-1:",mapped_read1) +- print >>sys.stderr, "%-25s%d" % (" Forward (+):",forward_read1) +- print >>sys.stderr, "%-25s%d" % (" Reverse (-):",reverse_read1) ++ print("\n#==================================================", file=sys.stderr) ++ print("#================Report (pair-end)=================", file=sys.stderr) ++ print("%-25s%d" % ("Total Reads:",total_read), file=sys.stderr) ++ print("%-25s%d" % ("Total Mapped Reads:", (mapped_read1 + mapped_read2)), file=sys.stderr) ++ print("%-25s%d" % ("Total Unmapped Reads:",(unmapped_read1 + unmapped_read2)), file=sys.stderr) ++ print("%-25s%d" % ("PCR duplicate:",pcr_duplicate), file=sys.stderr) ++ print("%-25s%d" % ("QC-failed:",low_qual), file=sys.stderr) ++ print("%-25s%d" % ("Not primary mapping:",secondary_hit), file=sys.stderr) ++ print("\n", end=' ', file=sys.stderr) ++ print("%-25s%d" % ("Unmapped Read-1:",unmapped_read1), file=sys.stderr) ++ print("%-25s%d" % ("Mapped Read-1:",mapped_read1), file=sys.stderr) ++ 
print("%-25s%d" % (" Forward (+):",forward_read1), file=sys.stderr) ++ print("%-25s%d" % (" Reverse (-):",reverse_read1), file=sys.stderr) + +- print >>sys.stderr, "\n", +- print >>sys.stderr, "%-25s%d" % ("Unmapped Read-2:",unmapped_read2) +- print >>sys.stderr, "%-25s%d" % ("Mapped Read-2:",mapped_read2) +- print >>sys.stderr, "%-25s%d" % (" Forward (+):",forward_read2) +- print >>sys.stderr, "%-25s%d" % (" Reverse (-):",reverse_read2) ++ print("\n", end=' ', file=sys.stderr) ++ print("%-25s%d" % ("Unmapped Read-2:",unmapped_read2), file=sys.stderr) ++ print("%-25s%d" % ("Mapped Read-2:",mapped_read2), file=sys.stderr) ++ print("%-25s%d" % (" Forward (+):",forward_read2), file=sys.stderr) ++ print("%-25s%d" % (" Reverse (-):",reverse_read2), file=sys.stderr) + +- print >>sys.stderr, "\n", +- print >>sys.stderr, "%-25s%d" % ("Mapped to (+/-):",plus_minus) +- print >>sys.stderr, "%-25s%d" % ("Mapped to (-/+):",minus_plus) +- print >>sys.stderr, "%-25s%d" % ("Mapped to (+/+):",plus_plus) +- print >>sys.stderr, "%-25s%d" % ("Mapped to (-/-):",minus_minus) +- print >>sys.stderr, "\n", +- print >>sys.stderr, "%-25s%d" % ("Spliced Hits:",_numSplitHit) +- print >>sys.stderr, "%-25s%d" % ("Non-spliced Hits:",_numMonoHit) +- print >>sys.stderr, "%-25s%d" % ("Reads have insertion:",_numInsertion) +- print >>sys.stderr, "%-25s%d" % ("Reads have deletion:",_numDeletion) ++ print("\n", end=' ', file=sys.stderr) ++ print("%-25s%d" % ("Mapped to (+/-):",plus_minus), file=sys.stderr) ++ print("%-25s%d" % ("Mapped to (-/+):",minus_plus), file=sys.stderr) ++ print("%-25s%d" % ("Mapped to (+/+):",plus_plus), file=sys.stderr) ++ print("%-25s%d" % ("Mapped to (-/-):",minus_minus), file=sys.stderr) ++ print("\n", end=' ', file=sys.stderr) ++ print("%-25s%d" % ("Spliced Hits:",_numSplitHit), file=sys.stderr) ++ print("%-25s%d" % ("Non-spliced Hits:",_numMonoHit), file=sys.stderr) ++ print("%-25s%d" % ("Reads have insertion:",_numInsertion), file=sys.stderr) ++ print("%-25s%d" % ("Reads 
have deletion:",_numDeletion), file=sys.stderr) + else: +- print >>sys.stderr,"\n#====================================================" +- print >>sys.stderr,"#================Report (single-end)=================" +- print >>sys.stderr, "%-25s%d" % ("Total Reads:",total_read) +- print >>sys.stderr, "%-25s%d" % ("Total Mapped Reads:", map_SE) +- print >>sys.stderr, "%-25s%d" % ("Total Unmapped Reads:",unmap_SE) +- print >>sys.stderr, "%-25s%d" % ("PCR duplicate:",pcr_duplicate) +- print >>sys.stderr, "%-25s%d" % ("QC-failed:",low_qual) +- print >>sys.stderr, "%-25s%d" % ("Not primary mapping:",secondary_hit) +- print >>sys.stderr, "%-25s%d" % ("froward (+):",forward_SE) +- print >>sys.stderr, "%-25s%d" % ("reverse (-):",reverse_SE) +- print >>sys.stderr, "\n", +- print >>sys.stderr, "%-25s%d" % ("Spliced Hits:",_numSplitHit) +- print >>sys.stderr, "%-25s%d" % ("Non-spliced Hits:",_numMonoHit) +- print >>sys.stderr, "%-25s%d" % ("Reads have insertion:",_numInsertion) +- print >>sys.stderr, "%-25s%d" % ("Reads have deletion:",_numDeletion) ++ print("\n#====================================================", file=sys.stderr) ++ print("#================Report (single-end)=================", file=sys.stderr) ++ print("%-25s%d" % ("Total Reads:",total_read), file=sys.stderr) ++ print("%-25s%d" % ("Total Mapped Reads:", map_SE), file=sys.stderr) ++ print("%-25s%d" % ("Total Unmapped Reads:",unmap_SE), file=sys.stderr) ++ print("%-25s%d" % ("PCR duplicate:",pcr_duplicate), file=sys.stderr) ++ print("%-25s%d" % ("QC-failed:",low_qual), file=sys.stderr) ++ print("%-25s%d" % ("Not primary mapping:",secondary_hit), file=sys.stderr) ++ print("%-25s%d" % ("froward (+):",forward_SE), file=sys.stderr) ++ print("%-25s%d" % ("reverse (-):",reverse_SE), file=sys.stderr) ++ print("\n", end=' ', file=sys.stderr) ++ print("%-25s%d" % ("Spliced Hits:",_numSplitHit), file=sys.stderr) ++ print("%-25s%d" % ("Non-spliced Hits:",_numMonoHit), file=sys.stderr) ++ print("%-25s%d" % ("Reads have 
insertion:",_numInsertion), file=sys.stderr) ++ print("%-25s%d" % ("Reads have deletion:",_numDeletion), file=sys.stderr) + + def samTobed(self,outfile=None,mergePE=False): + """Convert SAM file to BED file. BED file will be saved as xxx.sam.bed unless otherwise specified. +@@ -204,7 +204,7 @@ class ParseSAM: + if outfile is None: + outfile=self.fileName + ".bed" + +- print >>sys.stderr,"\tWriting bed entries to\"",outfile,"\"...", ++ print("\tWriting bed entries to\"",outfile,"\"...", end=' ', file=sys.stderr) + FO=open(outfile,'w') + for line in self.f: + if line.startswith(('@','track')):continue #skip head lines +@@ -240,14 +240,14 @@ class ParseSAM: + for i in range(0,len(comb),2): + blockStart.append(str(sum(comb[:i]))) + blockStarts = ','.join(blockStart) +- print >>FO, string.join((str(i) for i in [chrom,chromStart,chromEnd,name,score,strand,thickStart,thickEnd,itemRgb,blockCount,blockSizes,blockStarts]),sep="\t") +- print >>sys.stderr, "Done" ++ print(string.join((str(i) for i in [chrom,chromStart,chromEnd,name,score,strand,thickStart,thickEnd,itemRgb,blockCount,blockSizes,blockStarts]),sep="\t"), file=FO) ++ print("Done", file=sys.stderr) + FO.close() + self.f.seek(0) + + if mergePE: + #creat another bed file. 
pair-end reads will be merged into single bed entry +- print >>sys.stderr, "Writing consoidated bed file ...", ++ print("Writing consoidated bed file ...", end=' ', file=sys.stderr) + bedfile = open(outfile,'r') + outfile_2 = outfile + ".consolidate.bed" + outfile_3 = outfile + '.filter' +@@ -292,11 +292,11 @@ class ParseSAM: + if(blocks[key] ==1): #single end, single hit + st = [i - txSt[key] for i in starts[key]] + st = string.join([str(i) for i in st],',') +- print >>FO, chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key,"\t","11\t",strand[key][0],"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sizes[key],','),"\t",st ++ print(chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key,"\t","11\t",strand[key][0],"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sizes[key],','),"\t",st, file=FO) + else: + st = [i - txSt[key] for i in starts[key]] #single end, spliced hit + st = string.join([str(i) for i in st],',') +- print >>FO, chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key,"\t","12\t",strand[key][0],"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sizes[key],','),"\t",st ++ print(chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key,"\t","12\t",strand[key][0],"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sizes[key],','),"\t",st, file=FO) + + elif(count[key]==2): #pair-end read + direction = string.join(strand[key],'/') +@@ -306,17 +306,17 @@ class ParseSAM: + #st=[string.atoi(i) for i in st] + if(len(chr[key])==1): #pair-end reads mapped to same chromosome + if blocks[key] ==2: #pair end, single hits +- print >>FO, chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key + "|strand=" + direction + "|chrom=same","\t","21\t",'.',"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sz,','),"\t",string.join([str(i) for i in st],',') ++ print(chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key + "|strand=" + direction + 
"|chrom=same","\t","21\t",'.',"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sz,','),"\t",string.join([str(i) for i in st],','), file=FO) + elif blocks[key] >2: # +- print >>FO, chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key + "|strand=" + direction + "|chrom=same","\t","22\t",'.',"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sz,','),"\t",string.join([str(i) for i in st],',') ++ print(chr[key].pop(),"\t",txSt[key],"\t",txEnd[key],"\t",key + "|strand=" + direction + "|chrom=same","\t","22\t",'.',"\t",txSt[key],"\t",txEnd[key],"\t","0,255,0\t",blocks[key],"\t",string.join(sz,','),"\t",string.join([str(i) for i in st],','), file=FO) + else: +- print >>FOF,key,"\t","pair-end mapped, but two ends mapped to different chromosome" ++ print(key,"\t","pair-end mapped, but two ends mapped to different chromosome", file=FOF) + elif(count[key] >2): #reads occur more than 2 times +- print >>FOF,key,"\t","occurs more than 2 times in sam file" ++ print(key,"\t","occurs more than 2 times in sam file", file=FOF) + continue + FO.close() + FOF.close() +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + + def samTowig(self,outfile=None,log2scale=False,header=False,strandSpecific=False): +@@ -326,7 +326,7 @@ class ParseSAM: + if outfile is None: + outfile = self.fileName + ".wig" + FO=open(outfile,'w') +- print >>sys.stderr, "Writing wig file to\"",outfile,"\"..." 
++ print("Writing wig file to\"",outfile,"\"...", file=sys.stderr) + + headline="track type=wiggle_0 name=" + outfile + " track_label description='' visibility=full color=255,0,0" + wig=collections.defaultdict(dict) +@@ -359,24 +359,24 @@ class ParseSAM: + + blocks = cigar.fetch_exon(chrom,txStart,field[5]) + for block in blocks: +- hits.extend(range(block[1]+1,block[2]+1)) ++ hits.extend(list(range(block[1]+1,block[2]+1))) + + if strandSpecific is not True: + for i in hits: +- if wig[chrom].has_key(i): ++ if i in wig[chrom]: + wig[chrom][i] +=1 + else: + wig[chrom][i]=1 + else: + if strand_rule[read_type + strand] == '-': + for i in hits: +- if Nwig[chrom].has_key(i): ++ if i in Nwig[chrom]: + Nwig[chrom][i] += 1 + else: + Nwig[chrom][i] = 1 + if strand_rule[read_type + strand] == '+': + for i in hits: +- if Pwig[chrom].has_key(i): ++ if i in Pwig[chrom]: + Pwig[chrom][i] +=1 + else: + Pwig[chrom][i]=1 +@@ -385,17 +385,17 @@ class ParseSAM: + + if strandSpecific is not True: + for chr in sorted(wig.keys()): +- print >>sys.stderr, "Writing ",chr, " ..." ++ print("Writing ",chr, " ...", file=sys.stderr) + FO.write('variableStep chrom='+chr+'\n') + for coord in sorted(wig[chr]): + if log2scale:FO.write("%d\t%5.3f\n" % (coord,math.log(wig[chr][coord],2))) + else:FO.write("%d\t%d\n" % (coord,wig[chr][coord])) + else: +- chroms=set(Pwig.keys() + Nwig.keys()) ++ chroms=set(list(Pwig.keys()) + list(Nwig.keys())) + for chr in sorted(chroms): +- print >>sys.stderr, "Writing ",chr, " ..." 
++ print("Writing ",chr, " ...", file=sys.stderr) + FO.write('variableStep chrom='+chr+'\n') +- coords=sorted(set(Pwig[chr].keys() + Nwig[chr].keys())) ++ coords=sorted(set(list(Pwig[chr].keys()) + list(Nwig[chr].keys()))) + for coord in coords: + if ((coord in Pwig[chr]) and (coord not in Nwig[chr])): + FO.write("%d\t%d\n" % (coord,Pwig[chr][coord])) +@@ -418,7 +418,7 @@ class ParseSAM: + else: outfile = self.fileName + ".unmap.fa" + FO=open(outfile,'w') + unmapCount=0 +- print >>sys.stderr, "Writing unmapped reads to\"",outfile,"\"... ", ++ print("Writing unmapped reads to\"",outfile,"\"... ", end=' ', file=sys.stderr) + + for line in self.f: + hits=[] +@@ -438,7 +438,7 @@ class ParseSAM: + if fastq: FO.write('@' + seqID + '\n' + seq +'\n' + '+' +'\n' + qual+'\n') + else: FO.write('>' + seqID + '\n' + seq +'\n') + +- print >>sys.stderr, str(unmapCount) + " reads saved!\n" ++ print(str(unmapCount) + " reads saved!\n", file=sys.stderr) + FO.close() + self.f.seek(0) + +@@ -449,7 +449,7 @@ class ParseSAM: + outfile = self.fileName + ".PP.sam" + FO=open(outfile,'w') + PPcount=0 +- print >>sys.stderr, "Writing proper paired reads to\"",outfile,"\"... ", ++ print("Writing proper paired reads to\"",outfile,"\"... ", end=' ', file=sys.stderr) + for line in self.f: + hits=[] + if line[0] == '@':continue #skip head lines +@@ -460,7 +460,7 @@ class ParseSAM: + PPcount +=1 + FO.write(line) + FO.close() +- print >>sys.stderr, str(PPcount) + " reads were saved!\n", ++ print(str(PPcount) + " reads were saved!\n", end=' ', file=sys.stderr) + self.f.seek(0) + + def samNVC(self,outfile=None): +@@ -481,7 +481,7 @@ class ParseSAM: + c_count=[] + g_count=[] + t_count=[] +- print >>sys.stderr, "reading sam file ... " ++ print("reading sam file ... 
", file=sys.stderr) + for line in self.f: + if line.startswith('@'):continue #skip head lines + if ParseSAM._reExpr2.match(line):continue #skip blank lines +@@ -492,44 +492,44 @@ class ParseSAM: + RNA_read = field[9].upper() + else: + RNA_read = field[9].upper().translate(transtab)[::-1] +- for i in xrange(len(RNA_read)): ++ for i in range(len(RNA_read)): + key = str(i) + RNA_read[i] + base_freq[key] += 1 + +- print >>sys.stderr, "generating data matrix ..." +- print >>FO, "Position\tA\tC\tG\tT\tN\tX" +- for i in xrange(len(RNA_read)): +- print >>FO, str(i) + '\t', +- print >>FO, str(base_freq[str(i) + "A"]) + '\t', ++ print("generating data matrix ...", file=sys.stderr) ++ print("Position\tA\tC\tG\tT\tN\tX", file=FO) ++ for i in range(len(RNA_read)): ++ print(str(i) + '\t', end=' ', file=FO) ++ print(str(base_freq[str(i) + "A"]) + '\t', end=' ', file=FO) + a_count.append(str(base_freq[str(i) + "A"])) +- print >>FO, str(base_freq[str(i) + "C"]) + '\t', ++ print(str(base_freq[str(i) + "C"]) + '\t', end=' ', file=FO) + c_count.append(str(base_freq[str(i) + "C"])) +- print >>FO, str(base_freq[str(i) + "G"]) + '\t', ++ print(str(base_freq[str(i) + "G"]) + '\t', end=' ', file=FO) + g_count.append(str(base_freq[str(i) + "G"])) +- print >>FO, str(base_freq[str(i) + "T"]) + '\t', ++ print(str(base_freq[str(i) + "T"]) + '\t', end=' ', file=FO) + t_count.append(str(base_freq[str(i) + "T"])) +- print >>FO, str(base_freq[str(i) + "N"]) + '\t', +- print >>FO, str(base_freq[str(i) + "X"]) + '\t' ++ print(str(base_freq[str(i) + "N"]) + '\t', end=' ', file=FO) ++ print(str(base_freq[str(i) + "X"]) + '\t', file=FO) + FO.close() + + #generating R scripts +- print >>sys.stderr, "generating R script ..." 
+- print >>RS, "position=c(" + ','.join([str(i) for i in xrange(len(RNA_read))]) + ')' +- print >>RS, "A_count=c(" + ','.join(a_count) + ')' +- print >>RS, "C_count=c(" + ','.join(c_count) + ')' +- print >>RS, "G_count=c(" + ','.join(g_count) + ')' +- print >>RS, "T_count=c(" + ','.join(t_count) + ')' +- print >>RS, "total= A_count + C_count + G_count + T_count" +- print >>RS, "ym=max(A_count/total,C_count/total,G_count/total,T_count/total) + 0.05" +- print >>RS, "yn=min(A_count/total,C_count/total,G_count/total,T_count/total)" ++ print("generating R script ...", file=sys.stderr) ++ print("position=c(" + ','.join([str(i) for i in range(len(RNA_read))]) + ')', file=RS) ++ print("A_count=c(" + ','.join(a_count) + ')', file=RS) ++ print("C_count=c(" + ','.join(c_count) + ')', file=RS) ++ print("G_count=c(" + ','.join(g_count) + ')', file=RS) ++ print("T_count=c(" + ','.join(t_count) + ')', file=RS) ++ print("total= A_count + C_count + G_count + T_count", file=RS) ++ print("ym=max(A_count/total,C_count/total,G_count/total,T_count/total) + 0.05", file=RS) ++ print("yn=min(A_count/total,C_count/total,G_count/total,T_count/total)", file=RS) + +- print >>RS, 'pdf("NVC_plot.pdf")' +- print >>RS, 'plot(position,A_count/total,type="o",pch=20,ylim=c(yn,ym),col="dark green",xlab="Position of Read",ylab="Nucleotide Frequency")' +- print >>RS, 'lines(position,T_count/total,type="o",pch=20,col="red")' +- print >>RS, 'lines(position,G_count/total,type="o",pch=20,col="blue")' +- print >>RS, 'lines(position,C_count/total,type="o",pch=20,col="cyan")' +- print >>RS, 'legend('+ str(len(RNA_read)-10) + ',ym,legend=c("A","T","G","C"),col=c("dark green","red","blue","cyan"),lwd=2,pch=20,text.col=c("dark green","red","blue","cyan"))' +- print >>RS, "dev.off()" ++ print('pdf("NVC_plot.pdf")', file=RS) ++ print('plot(position,A_count/total,type="o",pch=20,ylim=c(yn,ym),col="dark green",xlab="Position of Read",ylab="Nucleotide Frequency")', file=RS) ++ 
print('lines(position,T_count/total,type="o",pch=20,col="red")', file=RS) ++ print('lines(position,G_count/total,type="o",pch=20,col="blue")', file=RS) ++ print('lines(position,C_count/total,type="o",pch=20,col="cyan")', file=RS) ++ print('legend('+ str(len(RNA_read)-10) + ',ym,legend=c("A","T","G","C"),col=c("dark green","red","blue","cyan"),lwd=2,pch=20,text.col=c("dark green","red","blue","cyan"))', file=RS) ++ print("dev.off()", file=RS) + + RS.close() + #self.f.seek(0) +@@ -546,7 +546,7 @@ class ParseSAM: + RS=open(outfile2,'w') + + gc_hist=collections.defaultdict(int) #key is GC percent, value is count of reads +- print >>sys.stderr, "reading sam file ... " ++ print("reading sam file ... ", file=sys.stderr) + for line in self.f: + if line[0] == '@':continue #skip head lines + if ParseSAM._reExpr2.match(line):continue #skip blank lines +@@ -556,18 +556,18 @@ class ParseSAM: + #print gc_percent + gc_hist[gc_percent] += 1 + +- print >>sys.stderr, "writing GC content ..." ++ print("writing GC content ...", file=sys.stderr) + +- print >>FO, "GC%\tread_count" +- for i in gc_hist.keys(): +- print >>FO, i + '\t' + str(gc_hist[i]) ++ print("GC%\tread_count", file=FO) ++ for i in list(gc_hist.keys()): ++ print(i + '\t' + str(gc_hist[i]), file=FO) + +- print >>sys.stderr, "writing R script ..." 
+- print >>RS, "pdf('GC_content.pdf')" +- print >>RS, 'gc=rep(c(' + ','.join([i for i in gc_hist.keys()]) + '),' + 'times=c(' + ','.join([str(i) for i in gc_hist.values()]) + '))' +- print >>RS, 'hist(gc,probability=T,breaks=%d,xlab="GC content (%%)",ylab="Density of Reads",border="blue",main="")' % 100 ++ print("writing R script ...", file=sys.stderr) ++ print("pdf('GC_content.pdf')", file=RS) ++ print('gc=rep(c(' + ','.join([i for i in list(gc_hist.keys())]) + '),' + 'times=c(' + ','.join([str(i) for i in list(gc_hist.values())]) + '))', file=RS) ++ print('hist(gc,probability=T,breaks=%d,xlab="GC content (%%)",ylab="Density of Reads",border="blue",main="")' % 100, file=RS) + #print >>RS, "lines(density(gc),col='red')" +- print >>RS ,"dev.off()" ++ print("dev.off()", file=RS) + #self.f.seek(0) + + def samDupRate(self,outfile=None,up_bound=500): +@@ -589,7 +589,7 @@ class ParseSAM: + + seqDup_count=collections.defaultdict(int) + posDup_count=collections.defaultdict(int) +- print >>sys.stderr, "reading sam file ... " ++ print("reading sam file ... ", file=sys.stderr) + for line in self.f: + if line[0] == '@':continue #skip head lines + if ParseSAM._reExpr2.match(line):continue #skip blank lines +@@ -616,37 +616,37 @@ class ParseSAM: + coord = chrom + ":" + str(chromStart) + "-" + str(chromEnd) + ":" + blockSizes + ":" + blockStarts + posDup[coord] +=1 + +- print >>sys.stderr, "report duplicte rate based on sequence ..." 
+- print >>SEQ, "Occurrence\tUniqReadNumber" +- for i in seqDup.values(): #key is occurence, value is uniq reads number (based on seq) ++ print("report duplicte rate based on sequence ...", file=sys.stderr) ++ print("Occurrence\tUniqReadNumber", file=SEQ) ++ for i in list(seqDup.values()): #key is occurence, value is uniq reads number (based on seq) + seqDup_count[i] +=1 +- for k in sorted(seqDup_count.iterkeys()): +- print >>SEQ, str(k) +'\t'+ str(seqDup_count[k]) ++ for k in sorted(seqDup_count.keys()): ++ print(str(k) +'\t'+ str(seqDup_count[k]), file=SEQ) + SEQ.close() + +- print >>sys.stderr, "report duplicte rate based on mapping ..." +- print >>POS, "Occurrence\tUniqReadNumber" +- for i in posDup.values(): #key is occurence, value is uniq reads number (based on coord) ++ print("report duplicte rate based on mapping ...", file=sys.stderr) ++ print("Occurrence\tUniqReadNumber", file=POS) ++ for i in list(posDup.values()): #key is occurence, value is uniq reads number (based on coord) + posDup_count[i] +=1 +- for k in sorted(posDup_count.iterkeys()): +- print >>POS, str(k) +'\t'+ str(posDup_count[k]) ++ for k in sorted(posDup_count.keys()): ++ print(str(k) +'\t'+ str(posDup_count[k]), file=POS) + POS.close() + + +- print >>sys.stderr, "generate R script ..." 
+- print >>RS, "pdf('duplicateRead.pdf')" +- print >>RS, "par(mar=c(5,4,4,5),las=0)" +- print >>RS, "seq_occ=c(" + ','.join([str(i) for i in sorted(seqDup_count.iterkeys()) ]) + ')' +- print >>RS, "seq_uniqRead=c(" + ','.join([str(seqDup_count[i]) for i in sorted(seqDup_count.iterkeys()) ]) + ')' +- print >>RS, "pos_occ=c(" + ','.join([str(i) for i in sorted(posDup_count.iterkeys()) ]) + ')' +- print >>RS, "pos_uniqRead=c(" + ','.join([str(posDup_count[i]) for i in sorted(posDup_count.iterkeys()) ]) + ')' +- print >>RS, "plot(pos_occ,log10(pos_uniqRead),ylab='Number of Reads (log10)',xlab='Frequency',pch=4,cex=0.8,col='blue',xlim=c(1,%d),yaxt='n')" % up_bound +- print >>RS, "points(seq_occ,log10(seq_uniqRead),pch=20,cex=0.8,col='red')" +- print >>RS, 'ym=floor(max(log10(pos_uniqRead)))' +- print >>RS, "legend(%d,ym,legend=c('Sequence-base','Mapping-base'),col=c('red','blue'),pch=c(4,20))" % max(up_bound-200,1) +- print >>RS, 'axis(side=2,at=0:ym,labels=0:ym)' +- print >>RS, 'axis(side=4,at=c(log10(pos_uniqRead[1]),log10(pos_uniqRead[2]),log10(pos_uniqRead[3]),log10(pos_uniqRead[4])), labels=c(round(pos_uniqRead[1]*100/sum(pos_uniqRead)),round(pos_uniqRead[2]*100/sum(pos_uniqRead)),round(pos_uniqRead[3]*100/sum(pos_uniqRead)),round(pos_uniqRead[4]*100/sum(pos_uniqRead))))' +- print >>RS, 'mtext(4, text = "Reads %", line = 2)' ++ print("generate R script ...", file=sys.stderr) ++ print("pdf('duplicateRead.pdf')", file=RS) ++ print("par(mar=c(5,4,4,5),las=0)", file=RS) ++ print("seq_occ=c(" + ','.join([str(i) for i in sorted(seqDup_count.keys()) ]) + ')', file=RS) ++ print("seq_uniqRead=c(" + ','.join([str(seqDup_count[i]) for i in sorted(seqDup_count.keys()) ]) + ')', file=RS) ++ print("pos_occ=c(" + ','.join([str(i) for i in sorted(posDup_count.keys()) ]) + ')', file=RS) ++ print("pos_uniqRead=c(" + ','.join([str(posDup_count[i]) for i in sorted(posDup_count.keys()) ]) + ')', file=RS) ++ print("plot(pos_occ,log10(pos_uniqRead),ylab='Number of Reads 
(log10)',xlab='Frequency',pch=4,cex=0.8,col='blue',xlim=c(1,%d),yaxt='n')" % up_bound, file=RS) ++ print("points(seq_occ,log10(seq_uniqRead),pch=20,cex=0.8,col='red')", file=RS) ++ print('ym=floor(max(log10(pos_uniqRead)))', file=RS) ++ print("legend(%d,ym,legend=c('Sequence-base','Mapping-base'),col=c('red','blue'),pch=c(4,20))" % max(up_bound-200,1), file=RS) ++ print('axis(side=2,at=0:ym,labels=0:ym)', file=RS) ++ print('axis(side=4,at=c(log10(pos_uniqRead[1]),log10(pos_uniqRead[2]),log10(pos_uniqRead[3]),log10(pos_uniqRead[4])), labels=c(round(pos_uniqRead[1]*100/sum(pos_uniqRead)),round(pos_uniqRead[2]*100/sum(pos_uniqRead)),round(pos_uniqRead[3]*100/sum(pos_uniqRead)),round(pos_uniqRead[4]*100/sum(pos_uniqRead))))', file=RS) ++ print('mtext(4, text = "Reads %", line = 2)', file=RS) + #self.f.seek(0) + + def getUniqMapRead(self,outfile=None): +@@ -655,7 +655,7 @@ class ParseSAM: + outfile = self.fileName + ".uniq.sam" + FO=open(outfile,'w') + Uniqcount=0 +- print >>sys.stderr, "Writing uniquely mapped reads to\"",outfile,"\"... ", ++ print("Writing uniquely mapped reads to\"",outfile,"\"... ", end=' ', file=sys.stderr) + for line in self.f: + hits=[] + if line[0] == '@':continue #skip head lines +@@ -667,11 +667,11 @@ class ParseSAM: + #else: + #print >>sys.stderr,line, + if (ParseSAM._uniqueHit_pat.search(line)): +- print >>sys.stderr,line, ++ print(line, end=' ', file=sys.stderr) + Uniqcount +=1 + FO.write(line) + FO.close() +- print >>sys.stderr, str(Uniqcount) + " reads were saved!\n", ++ print(str(Uniqcount) + " reads were saved!\n", end=' ', file=sys.stderr) + self.f.seek(0) + + def getWrongStrand(self,outfile=None): +@@ -680,7 +680,7 @@ class ParseSAM: + outfile = self.fileName + ".wrongStrand.sam" + FO=open(outfile,'w') + wrongStrand=0 +- print >>sys.stderr, "Writing incorrectly stranded reads to\"",outfile,"\"... ", ++ print("Writing incorrectly stranded reads to\"",outfile,"\"... 
", end=' ', file=sys.stderr) + for line in self.f: + hits=[] + if line.startswith('@'):continue #skip head lines +@@ -701,7 +701,7 @@ class ParseSAM: + wrongStrand+=1 + + FO.close() +- print >>sys.stderr, str(wrongStrand) + " reads were saved!\n", ++ print(str(wrongStrand) + " reads were saved!\n", end=' ', file=sys.stderr) + self.f.seek(0) + + def filterSpliceRead(self,outfile=None,min_overhang=8,min_gap=50,max_gap=1000000): +@@ -714,7 +714,7 @@ class ParseSAM: + outfile = self.fileName + ".SR.sam" + #outfile2 = self.fileName + ".SR.filter.sam" + splice_sites=collections.defaultdict(set) +- print >>sys.stderr, "\tDetermine splice sites with proper overhang, intron size ... ", ++ print("\tDetermine splice sites with proper overhang, intron size ... ", end=' ', file=sys.stderr) + for line in self.f: + if line[0] == '@':continue #skip head lines + if ParseSAM._reExpr2.match(line):continue #skip blank lines +@@ -741,12 +741,12 @@ class ParseSAM: + if (comb[2] >= min_overhang): + splice_sites[chrom].add(map_st + comb[0] + comb[1]) + self.f.seek(0) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + + FO=open(outfile,'w') + #FO2=open(outfile2,'w') +- print >>sys.stderr, "\tExtracting splicing reads ... ", ++ print("\tExtracting splicing reads ... 
", end=' ', file=sys.stderr) + total_SR =0 + extract_SR =0 + total_read =0 +@@ -778,10 +778,10 @@ class ParseSAM: + else: + #FO2.write(line) + continue +- print >>sys.stderr, "Done" +- print >>sys.stderr, "\tTotal mapped Read: " + str(total_read) +- print >>sys.stderr, "\tTotal Splicing Read: " + str(total_SR) +- print >>sys.stderr, "\Usable Splicing Read: " + str(extract_SR) ++ print("Done", file=sys.stderr) ++ print("\tTotal mapped Read: " + str(total_read), file=sys.stderr) ++ print("\tTotal Splicing Read: " + str(total_SR), file=sys.stderr) ++ print("\\Usable Splicing Read: " + str(extract_SR), file=sys.stderr) + FO.close() + #FO2.close() + self.f.seek(0) +@@ -792,7 +792,7 @@ class ParseSAM: + if outfile is None: + outfile = self.fileName + ".SR.sam" + FO=open(outfile,'w') +- print >>sys.stderr, "\tExtract splicing reads without any filter ...", ++ print("\tExtract splicing reads without any filter ...", end=' ', file=sys.stderr) + for line in self.f: + if line[0] == '@':continue #skip head lines + if ParseSAM._reExpr2.match(line):continue #skip blank lines +@@ -803,7 +803,7 @@ class ParseSAM: + if (len(comb)>=3): + FO.write(line) + +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + self.f.seek(0) + FO.close() + +@@ -812,7 +812,7 @@ class ParseSAM: + The original SAM file must be sorted before hand. if not, using linux command like "sort -k3,3 -k4,4n myfile.sam >myfile.sorted.sam" ''' + if outfile is None: + outfile = self.fileName + ".collapsed.sam" +- print >>sys.stderr, "Writing collapsed SAM file to\"",outfile,"\"... " ++ print("Writing collapsed SAM file to\"",outfile,"\"... ", file=sys.stderr) + FO=open(outfile,'w') + flag="" + for line in self.f: +@@ -840,7 +840,7 @@ class ParseSAM: + else: + outfile = outfile + ".qual.plot.r" + FO=open(outfile,'w') +- print >>sys.stderr, "\tcalculating quality score ... " ++ print("\tcalculating quality score ... 
", file=sys.stderr) + qual_min={} + qual_max={} + qual_sum={} +@@ -875,16 +875,16 @@ class ParseSAM: + max_qualities =[str(qual_max[i]) for i in range(0,read_len)] + avg_qualities = [str(qual_sum[i]/total_read) for i in range(0,read_len)] + nt_pos = [str(i) for i in range(0,read_len)] +- print >>FO, "nt_pos=c(" + ','.join(nt_pos) + ')' +- print >>FO, "max_qual=c(" + ','.join(max_qualities) + ')' +- print >>FO, "min_qual=c(" + ','.join(min_qualities) + ')' +- print >>FO, "avg_qual=c(" + ','.join(avg_qualities) + ')' +- print >>FO, "pdf('phred_qual.pdf')" +- print >>FO, "plot(nt_pos,avg_qual, xlab=\"Nucleotide Position (5'->3')\", ylab='Phred Quality',ylim=c(0,97),lwd=2,type='s')" +- print >>FO, 'lines(nt_pos,max_qual,type="s",lwd=2,col="red")' +- print >>FO, 'lines(nt_pos,min_qual,type="s",lwd=2,col="blue")' +- print >>FO, 'legend(0,100,legend=c("Max","Average","Min"),col=c("red","black","blue"),lwd=2)' +- print >>FO, 'dev.off()' ++ print("nt_pos=c(" + ','.join(nt_pos) + ')', file=FO) ++ print("max_qual=c(" + ','.join(max_qualities) + ')', file=FO) ++ print("min_qual=c(" + ','.join(min_qualities) + ')', file=FO) ++ print("avg_qual=c(" + ','.join(avg_qualities) + ')', file=FO) ++ print("pdf('phred_qual.pdf')", file=FO) ++ print("plot(nt_pos,avg_qual, xlab=\"Nucleotide Position (5'->3')\", ylab='Phred Quality',ylim=c(0,97),lwd=2,type='s')", file=FO) ++ print('lines(nt_pos,max_qual,type="s",lwd=2,col="red")', file=FO) ++ print('lines(nt_pos,min_qual,type="s",lwd=2,col="blue")', file=FO) ++ print('legend(0,100,legend=c("Max","Average","Min"),col=c("red","black","blue"),lwd=2)', file=FO) ++ print('dev.off()', file=FO) + #for i in range(0,read_len): + # print >>sys.stderr, str(i) + '\t' + str(qual_max[i]) + '\t' + str(qual_min[i]) + '\t' + str(qual_sum[i]/total_read) + #self.f.seek(0) +@@ -918,7 +918,7 @@ class ParseSAM: + scores[chrom][pos] =1 + else: + scores[chrom][pos] +=1 +- if lines % 10000 == 0: print >>sys.stderr, "%i lines loaded \r" % lines ++ if lines % 10000 
== 0: print("%i lines loaded \r" % lines, file=sys.stderr) + return scores + self.f.seek(0) + +@@ -943,7 +943,7 @@ class QCSAM: + The 5th column is number of reads fallen into the region defined by the first 3 columns''' + + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + if outfile is None: + exon_count = self.fileName + "_exon.count.bed" +@@ -968,7 +968,7 @@ class QCSAM: + splicedReads=0 + + #read SAM +- print >>sys.stderr, "reading "+ self.fileName + '...', ++ print("reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -990,10 +990,10 @@ class QCSAM: + ranges[chrom].add_interval( Interval( mid, mid ) ) + + self.f.seek(0) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + #read refbed file +- print >>sys.stderr, "Assign reads to "+ refbed + '...', ++ print("Assign reads to "+ refbed + '...', end=' ', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith('#'):continue +@@ -1007,14 +1007,14 @@ class QCSAM: + geneName = fields[3] + strand = fields[5].replace(" ","_") + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + intron_starts = exon_ends[:-1] + intron_ends=exon_starts[1:] + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ 
print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + + # assign reads to intron +@@ -1050,28 +1050,28 @@ class QCSAM: + EXON_OUT.write(chrom + "\t" + str(st) + "\t" + str(end) + "\t" + geneName + "_exon_" + str(exonNum) + "\t" + str(hits) + "\t" + strand + '\n') + exonNum += 1 + intergenicReads=totalReads-exonReads-intronReads-splicedReads +- print >>sys.stderr, "Done." + '\n' +- print >>sys.stderr, "Total reads:\t" + str(totalReads) +- print >>sys.stderr, "Exonic reads:\t" + str(exonReads) +- print >>sys.stderr, "Intronic reads:\t" + str(intronReads) +- print >>sys.stderr, "Splicing reads:\t" + str(splicedReads) +- print >>sys.stderr, "Intergenic reads:\t" + str(intergenicReads) ++ print("Done." + '\n', file=sys.stderr) ++ print("Total reads:\t" + str(totalReads), file=sys.stderr) ++ print("Exonic reads:\t" + str(exonReads), file=sys.stderr) ++ print("Intronic reads:\t" + str(intronReads), file=sys.stderr) ++ print("Splicing reads:\t" + str(splicedReads), file=sys.stderr) ++ print("Intergenic reads:\t" + str(intergenicReads), file=sys.stderr) + +- print >>sys.stderr,"writing R script ...", ++ print("writing R script ...", end=' ', file=sys.stderr) + totalReads=float(totalReads) +- print >>R_OUT, "pdf('%s')" % rpdf +- print >>R_OUT, "dat=c(%d,%d,%d,%d)" % (exonReads,splicedReads,intronReads,intergenicReads) +- print >>R_OUT, "lb=c('exon(%.2f)','junction(%.2f)','intron(%.2f)','intergenic(%.2f)')" % (exonReads/totalReads,splicedReads/totalReads,intronReads/totalReads,intergenicReads/totalReads) +- print >>R_OUT, "pie(dat,labels=lb,col=rainbow(4),clockwise=TRUE,main='Total reads = %d')" % int(totalReads) +- print >>R_OUT, "dev.off()" +- print >>sys.stderr, "Done." 
++ print("pdf('%s')" % rpdf, file=R_OUT) ++ print("dat=c(%d,%d,%d,%d)" % (exonReads,splicedReads,intronReads,intergenicReads), file=R_OUT) ++ print("lb=c('exon(%.2f)','junction(%.2f)','intron(%.2f)','intergenic(%.2f)')" % (exonReads/totalReads,splicedReads/totalReads,intronReads/totalReads,intergenicReads/totalReads), file=R_OUT) ++ print("pie(dat,labels=lb,col=rainbow(4),clockwise=TRUE,main='Total reads = %d')" % int(totalReads), file=R_OUT) ++ print("dev.off()", file=R_OUT) ++ print("Done.", file=sys.stderr) + + + def coverageGeneBody(self,refbed,outfile=None): + '''Calculate reads coverage over gene body, from 5'to 3'. each gene will be equally divied + into 100 regsions''' + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + if outfile is None: + outfile1 = self.fileName + ".geneBodyCoverage_plot.r" +@@ -1088,7 +1088,7 @@ class QCSAM: + rpkm={} + + #read SAM +- print >>sys.stderr, "reading "+ self.fileName + '...', ++ print("reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -1114,9 +1114,9 @@ class QCSAM: + ranges[chrom] = Intersecter() + else: + ranges[chrom].add_interval( Interval( st, st+size ) ) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>sys.stderr, "calculating coverage over gene body ..." 
++ print("calculating coverage over gene body ...", file=sys.stderr) + coverage=collections.defaultdict(int) + flag=0 + for line in open(refbed,'r'): +@@ -1130,19 +1130,19 @@ class QCSAM: + geneName = fields[3] + strand = fields[5] + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + gene_all_base=[] + percentile_base=[] + mRNA_len =0 + flag=0 + for st,end in zip(exon_starts,exon_ends): +- gene_all_base.extend(range(st+1,end+1)) #0-based coordinates on genome ++ gene_all_base.extend(list(range(st+1,end+1))) #0-based coordinates on genome + mRNA_len = len(gene_all_base) + if mRNA_len <100: + flag=1 +@@ -1159,18 +1159,18 @@ class QCSAM: + coverage[i] += len(ranges[chrom].find(percentile_base[i], percentile_base[i]+1)) + x_coord=[] + y_coord=[] +- print >>OUT2, "Total reads: " + str(totalReads) +- print >>OUT2, "Fragment number: " + str(fragment_num) +- print >>OUT2, "percentile\tcount" ++ print("Total reads: " + str(totalReads), file=OUT2) ++ print("Fragment number: " + str(fragment_num), file=OUT2) ++ print("percentile\tcount", file=OUT2) + for i in coverage: + x_coord.append(str(i)) + y_coord.append(str(coverage[i])) +- print >>OUT2, str(i) + '\t' + str(coverage[i]) +- print >>OUT1, "pdf('geneBody_coverage.pdf')" +- print >>OUT1, "x=0:100" +- print >>OUT1, "y=c(" + 
','.join(y_coord) + ')' +- print >>OUT1, "plot(x,y,xlab=\"percentile of gene body (5'->3')\",ylab='read number',type='s')" +- print >>OUT1, "dev.off()" ++ print(str(i) + '\t' + str(coverage[i]), file=OUT2) ++ print("pdf('geneBody_coverage.pdf')", file=OUT1) ++ print("x=0:100", file=OUT1) ++ print("y=c(" + ','.join(y_coord) + ')', file=OUT1) ++ print("plot(x,y,xlab=\"percentile of gene body (5'->3')\",ylab='read number',type='s')", file=OUT1) ++ print("dev.off()", file=OUT1) + + def calculateRPKM(self,refbed,outfile=None): + '''calculate RPKM values for each gene in refbed. Only uniquely aligned reads are used. +@@ -1178,7 +1178,7 @@ class QCSAM: + exon per Million mapped reads) for each exon, intron and mRNA''' + + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + if outfile is None: + rpkm_file = self.fileName + ".rpkm.xls" +@@ -1194,7 +1194,7 @@ class QCSAM: + rpkm={} + + #read SAM +- print >>sys.stderr, "reading "+ self.fileName + '...', ++ print("reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -1228,17 +1228,17 @@ class QCSAM: + ranges[chrom].add_interval( Interval( mid, mid ) ) + + self.f.seek(0) +- print >>sys.stderr, "Done" +- print >>RPKM_OUT, "Total mapped reads (TR): " + str(totalReads) +- print >>RPKM_OUT, "Multiple mapped reads (MR): " + str(multiMapReads) +- print >>RPKM_OUT, "Uniquely mapped reads (UR): " + str(totalReads - multiMapReads) +- print >>RPKM_OUT, "Spliced mapped reads (SR): " + str(sR) +- print >>RPKM_OUT, "Corrected uniquely mapped reads (cUR): " + str(cUR) ++ print("Done", file=sys.stderr) ++ print("Total mapped reads (TR): " + str(totalReads), file=RPKM_OUT) ++ print("Multiple mapped reads (MR): " + str(multiMapReads), file=RPKM_OUT) ++ print("Uniquely mapped reads (UR): " + str(totalReads 
- multiMapReads), file=RPKM_OUT) ++ print("Spliced mapped reads (SR): " + str(sR), file=RPKM_OUT) ++ print("Corrected uniquely mapped reads (cUR): " + str(cUR), file=RPKM_OUT) + if totalReads ==0: + sys.exit(1) + + #read refbed file +- print >>sys.stderr, "Assign reads to "+ refbed + '...', ++ print("Assign reads to "+ refbed + '...', end=' ', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith('#'):continue +@@ -1252,16 +1252,16 @@ class QCSAM: + geneName = fields[3] + strand = fields[5].replace(" ","_") + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends) +- exon_sizes = map(int,fields[10].rstrip(',\n').split(',')) ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)) ++ exon_sizes = list(map(int,fields[10].rstrip(',\n').split(','))) + intron_starts = exon_ends[:-1] + intron_ends=exon_starts[1:] + key='\t'.join((chrom.lower(),str(tx_start),str(tx_end),geneName,'0',strand)) + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + + # assign reads to intron +@@ -1309,7 +1309,7 @@ class QCSAM: + except: + RPKM_OUT.write(chrom.lower() + "\t" + str(tx_start) + "\t" + str(tx_end) + "\t" + geneName + "_mRNA" + "\t" + str(0) + "\t" + strand + '\t' + str(0) +'\n') + rpkm[key] = 0 +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + return rpkm + self.f.seek(0) + +@@ -1320,7 +1320,7 @@ class QCSAM: + NOTE: intronic reads are not counted as part of 
total reads''' + + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + if outfile is None: + rpkm_file = self.fileName + ".rpkm.xls" +@@ -1338,7 +1338,7 @@ class QCSAM: + rpkm={} + + #read gene model file, the purpose is to remove intronic reads +- print >>sys.stderr, "Reading reference gene model "+ refbed + '...' ++ print("Reading reference gene model "+ refbed + '...', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith(('#','track','browser')):continue +@@ -1351,12 +1351,12 @@ class QCSAM: + geneName = fields[3] + strand = fields[5].replace(" ","_") + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + + for st,end in zip(exon_starts,exon_ends): +@@ -1366,7 +1366,7 @@ class QCSAM: + exon_ranges[chrom].add_interval( Interval( st, end ) ) + + #read SAM +- print >>sys.stderr, "reading "+ self.fileName + '...', ++ print("reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -1401,22 +1401,22 @@ class QCSAM: + ranges[chrom] = Intersecter() + else: + ranges[chrom].add_interval( Interval( mid, mid ) ) +- 
else: #if this framgnet is intronic, skip it. ++ else: #if this framgnet is intronic, skip it. + #intronic +=1 +- continue ++ continue + self.f.seek(0) +- print >>sys.stderr, "Done" +- print >>RPKM_OUT, "Total mapped reads (TR): " + str(totalReads) +- print >>RPKM_OUT, "Multiple mapped reads (MR): " + str(multiMapReads) +- print >>RPKM_OUT, "Uniquely mapped reads (UR): " + str(totalReads - multiMapReads) +- print >>RPKM_OUT, "Spliced mapped reads (SR): " + str(sR) +- print >>RPKM_OUT, "Corrected uniquely mapped reads (cUR, non-intronic fragments): " + str(cUR) ++ print("Done", file=sys.stderr) ++ print("Total mapped reads (TR): " + str(totalReads), file=RPKM_OUT) ++ print("Multiple mapped reads (MR): " + str(multiMapReads), file=RPKM_OUT) ++ print("Uniquely mapped reads (UR): " + str(totalReads - multiMapReads), file=RPKM_OUT) ++ print("Spliced mapped reads (SR): " + str(sR), file=RPKM_OUT) ++ print("Corrected uniquely mapped reads (cUR, non-intronic fragments): " + str(cUR), file=RPKM_OUT) + #print >>RPKM_OUT, "Intronic Fragments (IF): " + str(intronic) + if totalReads ==0: + sys.exit(1) + + #read refbed file +- print >>sys.stderr, "Assign reads to "+ refbed + '...', ++ print("Assign reads to "+ refbed + '...', end=' ', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith('#'):continue +@@ -1430,16 +1430,16 @@ class QCSAM: + geneName = fields[3] + strand = fields[5].replace(" ","_") + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends) +- exon_sizes = map(int,fields[10].rstrip(',\n').split(',')) ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = 
list(map((lambda x, y: x + y ), exon_starts, exon_ends)) ++ exon_sizes = list(map(int,fields[10].rstrip(',\n').split(','))) + intron_starts = exon_ends[:-1] + intron_ends=exon_starts[1:] + key='\t'.join((chrom.lower(),str(tx_start),str(tx_end),geneName,'0',strand)) + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + + # assign reads to intron +@@ -1487,7 +1487,7 @@ class QCSAM: + except: + RPKM_OUT.write(chrom.lower() + "\t" + str(tx_start) + "\t" + str(tx_end) + "\t" + geneName + "_mRNA" + "\t" + str(0) + "\t" + strand + '\t' + str(0) +'\n') + rpkm[key] = 0 +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + return rpkm + self.f.seek(0) + +@@ -1499,7 +1499,7 @@ class QCSAM: + unknownReads=0 + ranges={} + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + + if outfile is None: +@@ -1508,7 +1508,7 @@ class QCSAM: + out_file = outfile + ".unknownReads.SAM" + OUT=open(out_file,'w') + +- print >>sys.stderr, "Reading reference gene model "+ refbed + '...' 
++ print("Reading reference gene model "+ refbed + '...', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith(('#','track','browser')):continue +@@ -1521,12 +1521,12 @@ class QCSAM: + geneName = fields[3] + strand = fields[5].replace(" ","_") + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + + for st,end in zip(exon_starts,exon_ends): +@@ -1535,7 +1535,7 @@ class QCSAM: + else: + ranges[chrom].add_interval( Interval( st, end ) ) + +- print >>sys.stderr, "Processing SAM file "+ self.fileName + '...' 
++ print("Processing SAM file "+ self.fileName + '...', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -1564,8 +1564,8 @@ class QCSAM: + OUT.write(line) + unknownReads +=1 + OUT.close() +- print >>sys.stderr, "Total reads mapped to genome: " + str(totalReads) +- print >>sys.stderr, "Total reads not overlapped with any exon: " + str(unknownReads) ++ print("Total reads mapped to genome: " + str(totalReads), file=sys.stderr) ++ print("Total reads not overlapped with any exon: " + str(unknownReads), file=sys.stderr) + self.f.seek(0) + + def genomicFragSize(self,outfile=None,low_bound=0,up_bound=1000,step=10): +@@ -1589,16 +1589,16 @@ class QCSAM: + ranges={} + ranges[chrom]=Intersecter() + +- window_left_bound = range(low_bound,up_bound,step) +- frag_size=0 ++ window_left_bound = list(range(low_bound,up_bound,step)) ++ frag_size=0 + +- pair_num=0.0 +- ultra_low=0.0 +- ultra_high=0.0 +- size=[] +- counts=[] +- count=0 +- print >>sys.stderr, "Reading SAM file "+ self.fileName + ' ... ', ++ pair_num=0.0 ++ ultra_low=0.0 ++ ultra_high=0.0 ++ size=[] ++ counts=[] ++ count=0 ++ print("Reading SAM file "+ self.fileName + ' ... 
', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -1606,7 +1606,7 @@ class QCSAM: + # continue + flagCode=string.atoi(fields[1]) + if (flagCode & 0x0001) ==0: +- print >>sys.stderr,"NOT pair-end sequencing" ++ print("NOT pair-end sequencing", file=sys.stderr) + sys.exit(1) + if (flagCode & 0x0004) != 0: continue #skip unmap reads + if not ParseSAM._uniqueHit_pat.search(line): #skip multiple mapped reads +@@ -1632,29 +1632,29 @@ class QCSAM: + ultra_high +=1 + continue + ranges[chrom].add_interval( Interval( frag_size-1, frag_size ) ) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + if pair_num==0: +- print >>sys.stderr, "Cannot find paired reads" ++ print("Cannot find paired reads", file=sys.stderr) + sys.exit(0) +- print >>FQ, "Total paired read " + str(pair_num) +- print >>FQ, "<=" + str(low_bound) + "\t"+ str(ultra_low) ++ print("Total paired read " + str(pair_num), file=FQ) ++ print("<=" + str(low_bound) + "\t"+ str(ultra_low), file=FQ) + for st in window_left_bound: + size.append(str(st + step/2)) + count = str(len(ranges[chrom].find(st,st + step))) + counts.append(count) +- print >>FQ, str(st) + '\t' + str(st+step) +'\t' + count +- print >>FQ, ">" + str(up_bound) + "\t"+ str(ultra_high) ++ print(str(st) + '\t' + str(st+step) +'\t' + count, file=FQ) ++ print(">" + str(up_bound) + "\t"+ str(ultra_high), file=FQ) + +- print >>RS, "pdf('gFragSize.pdf')" +- print >>RS, "par(mfrow=c(2,1),cex.main=0.8,cex.lab=0.8,cex.axis=0.8,mar=c(4,4,4,1))" +- print >>RS, 'pie(c(%d,%d,%d),col=rainbow(3),cex=0.5,radius=1,main="Total %d fragments",labels=c("fraSize <= %d\\n(%4.2f%%)","fragSize > %d\\n(%4.2f%%)","%d < fragSize <= %d\\n(%4.2f%%)"), density=rep(80,80,80),angle=c(90,140,170))' % (ultra_low, ultra_high, pair_num -ultra_low -ultra_high, pair_num, low_bound, ultra_low*100/pair_num, up_bound, ultra_high*100/pair_num, low_bound, up_bound, 100-ultra_low*100/pair_num - 
ultra_high*100/pair_num) +- print >>RS, 'fragsize=rep(c(' + ','.join(size) + '),' + 'times=c(' + ','.join(counts) + '))' +- print >>RS, 'frag_sd = round(sd(fragsize))' +- print >>RS, 'frag_mean = round(mean(fragsize))' +- print >>RS, 'hist(fragsize,probability=T,breaks=%d,xlab="Fragment size (bp)",main=paste(c("Mean=",frag_mean,";","SD=",frag_sd),collapse=""),border="blue")' % len(window_left_bound) +- print >>RS, "lines(density(fragsize,bw=%d),col='red')" % (2*step) +- print >>RS ,"dev.off()" ++ print("pdf('gFragSize.pdf')", file=RS) ++ print("par(mfrow=c(2,1),cex.main=0.8,cex.lab=0.8,cex.axis=0.8,mar=c(4,4,4,1))", file=RS) ++ print('pie(c(%d,%d,%d),col=rainbow(3),cex=0.5,radius=1,main="Total %d fragments",labels=c("fraSize <= %d\\n(%4.2f%%)","fragSize > %d\\n(%4.2f%%)","%d < fragSize <= %d\\n(%4.2f%%)"), density=rep(80,80,80),angle=c(90,140,170))' % (ultra_low, ultra_high, pair_num -ultra_low -ultra_high, pair_num, low_bound, ultra_low*100/pair_num, up_bound, ultra_high*100/pair_num, low_bound, up_bound, 100-ultra_low*100/pair_num - ultra_high*100/pair_num), file=RS) ++ print('fragsize=rep(c(' + ','.join(size) + '),' + 'times=c(' + ','.join(counts) + '))', file=RS) ++ print('frag_sd = round(sd(fragsize))', file=RS) ++ print('frag_mean = round(mean(fragsize))', file=RS) ++ print('hist(fragsize,probability=T,breaks=%d,xlab="Fragment size (bp)",main=paste(c("Mean=",frag_mean,";","SD=",frag_sd),collapse=""),border="blue")' % len(window_left_bound), file=RS) ++ print("lines(density(fragsize,bw=%d),col='red')" % (2*step), file=RS) ++ print("dev.off()", file=RS) + FO.close() + FQ.close() + RS.close() +@@ -1665,7 +1665,7 @@ class QCSAM: + '''for each gene, check if its RPKM (epxresion level) has already been saturated or not''' + + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + if outfile is None: + rpkm_file = self.fileName + 
".eRPKM.xls" +@@ -1685,7 +1685,7 @@ class QCSAM: + #read SAM + my_pat = re.compile(r'NH:i:(\d+)\b') + NH_tag=0 +- print >>sys.stderr, "Reading "+ self.fileName + '...', ++ print("Reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -1698,7 +1698,7 @@ class QCSAM: + elif len(hitNum) ==1: + if int(hitNum[0])>1: continue #skip multiple mapped reads + else: +- print >>sys.stderr, "More than 1 NH tag found within a single line. Incorrect SAM format!" ++ print("More than 1 NH tag found within a single line. Incorrect SAM format!", file=sys.stderr) + sys.exit(1) + + chrom = fields[2].upper() +@@ -1719,12 +1719,12 @@ class QCSAM: + block_list.append(chrom + ":" + str(mid)) + + if NH_tag==1: +- print >>sys.stderr, "Warn: NO NH tag found. Cannot determine uniqueness of alignment. All alignments will be used" +- print >>sys.stderr, "Done" ++ print("Warn: NO NH tag found. Cannot determine uniqueness of alignment. 
All alignments will be used", file=sys.stderr) ++ print("Done", file=sys.stderr) + +- print >>sys.stderr, "shuffling alignments ...", ++ print("shuffling alignments ...", end=' ', file=sys.stderr) + random.shuffle(block_list) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + + ranges={} +@@ -1734,7 +1734,7 @@ class QCSAM: + rawCount_table=collections.defaultdict(list) + RPKM_head=['chr','start','end','name','score','strand'] + +- tmp=range(sample_start,sample_end,sample_step) ++ tmp=list(range(sample_start,sample_end,sample_step)) + tmp.append(100) + #=========================sampling uniquely mapped reads from population + for pertl in tmp: #[5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95,100] +@@ -1744,7 +1744,7 @@ class QCSAM: + sample_size += index_end -index_st + + RPKM_head.append(str(pertl) + '%') +- print >>sys.stderr, "sampling " + str(pertl) +"% (" + str(sample_size) + ") fragments ...", ++ print("sampling " + str(pertl) +"% (" + str(sample_size) + ") fragments ...", end=' ', file=sys.stderr) + for i in range(index_st, index_end): + (chr,coord) = block_list[i].split(':') + if chr not in ranges: +@@ -1763,14 +1763,14 @@ class QCSAM: + tx_end = int( fields[2] ) + geneName = fields[3] + strand = fields[5].replace(" ","_") +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends) +- exon_sizes = map(int,fields[10].rstrip(',\n').split(',')) ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)) ++ exon_sizes = list(map(int,fields[10].rstrip(',\n').split(','))) + 
key='\t'.join((chrom.lower(),str(tx_start),str(tx_end),geneName,'0',strand)) + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, file=sys.stderr) + continue + mRNA_count=0 #we need to initializ it to 0 for each gene + mRNA_len=sum(exon_sizes) +@@ -1778,24 +1778,24 @@ class QCSAM: + if chrom in ranges: + mRNA_count += len(ranges[chrom].find(st,end)) + if mRNA_len ==0: +- print >>sys.stderr, geneName + " has 0 nucleotides. Exit!" ++ print(geneName + " has 0 nucleotides. Exit!", file=sys.stderr) + sys.exit(1) + if sample_size == 0: +- print >>sys.stderr, "Too few reads to sample. Exit!" ++ print("Too few reads to sample. Exit!", file=sys.stderr) + sys.exit(1) + mRNA_RPKM = (mRNA_count * 1000000000.0)/(mRNA_len * sample_size) + RPKM_table[key].append(str(mRNA_RPKM)) + rawCount_table[key].append(str(mRNA_count)) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + #self.f.seek(0) +- print >>RPKM_OUT, '\t'.join(RPKM_head) +- print >>RAW_OUT, '\t'.join(RPKM_head) ++ print('\t'.join(RPKM_head), file=RPKM_OUT) ++ print('\t'.join(RPKM_head), file=RAW_OUT) + for key in RPKM_table: +- print >>RPKM_OUT, key + '\t', +- print >>RPKM_OUT, '\t'.join(RPKM_table[key]) +- print >>RAW_OUT, key + '\t', +- print >>RAW_OUT, '\t'.join(rawCount_table[key]) ++ print(key + '\t', end=' ', file=RPKM_OUT) ++ print('\t'.join(RPKM_table[key]), file=RPKM_OUT) ++ print(key + '\t', end=' ', file=RAW_OUT) ++ print('\t'.join(rawCount_table[key]), file=RAW_OUT) + + def saturation_junction(self,refgene,outfile=None,sample_start=5,sample_step=5,sample_end=100,min_intron=50,recur=1): + '''check if an RNA-seq experiment is saturated in terms of detecting known splicing junction''' +@@ -1805,7 +1805,7 @@ class QCSAM: + else: + out_file = outfile + ".junctionSaturation_plot.r" + if refgene is None: +- print >>sys.stderr, "You must provide reference gene model in bed format." 
++ print("You must provide reference gene model in bed format.", file=sys.stderr) + sys.exit(1) + + OUT = open(out_file,'w') +@@ -1813,12 +1813,12 @@ class QCSAM: + + #reading reference gene + knownSpliceSites= set() +- print >>sys.stderr, "reading reference bed file: ",refgene, " ... ", ++ print("reading reference bed file: ",refgene, " ... ", end=' ', file=sys.stderr) + for line in open(refgene,'r'): + if line.startswith(('#','track','browser')):continue + fields = line.split() + if(len(fields)<12): +- print >>sys.stderr, "Invalid bed line (skipped):",line, ++ print("Invalid bed line (skipped):",line, end=' ', file=sys.stderr) + continue + chrom = fields[0].upper() + tx_start = int( fields[1] ) +@@ -1826,15 +1826,15 @@ class QCSAM: + if int(fields[9] ==1): + continue + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + intron_start = exon_ends[:-1] + intron_end=exon_starts[1:] + for st,end in zip (intron_start, intron_end): + knownSpliceSites.add(chrom + ":" + str(st) + "-" + str(end)) +- print >>sys.stderr,"Done! Total "+str(len(knownSpliceSites)) + " known splicing sites" ++ print("Done! 
Total "+str(len(knownSpliceSites)) + " known splicing sites", file=sys.stderr) + + + #read SAM file +@@ -1842,7 +1842,7 @@ class QCSAM: + intron_start=[] + intron_end=[] + uniqSpliceSites=collections.defaultdict(int) +- print >>sys.stderr, "Reading "+ self.fileName + '...', ++ print("Reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -1883,13 +1883,13 @@ class QCSAM: + for st,end in zip(intron_st, intron_end): + samSpliceSites.append(chrom + ":" + str(st) + "-" + str(end)) + #self.f.seek(0) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + + +- print >>sys.stderr, "shuffling alignments ...", ++ print("shuffling alignments ...", end=' ', file=sys.stderr) + random.shuffle(samSpliceSites) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + #resampling + SR_num = len(samSpliceSites) +@@ -1898,7 +1898,7 @@ class QCSAM: + known_junc=[] + all_junc=[] + #=========================sampling uniquely mapped reads from population +- tmp=range(sample_start,sample_end,sample_step) ++ tmp=list(range(sample_start,sample_end,sample_step)) + tmp.append(100) + for pertl in tmp: #[5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95,100] + knownSpliceSites_num = 0 +@@ -1907,26 +1907,26 @@ class QCSAM: + if index_st < 0: index_st = 0 + sample_size += index_end -index_st + +- print >>sys.stderr, "sampling " + str(pertl) +"% (" + str(sample_size) + ") unique splicing alignments ...", ++ print("sampling " + str(pertl) +"% (" + str(sample_size) + ") unique splicing alignments ...", end=' ', file=sys.stderr) + for i in range(index_st, index_end): + uniqSpliceSites[samSpliceSites[i]] +=1 +- all_junc.append(str(len(uniqSpliceSites.keys()))) ++ all_junc.append(str(len(list(uniqSpliceSites.keys())))) + for sj in uniqSpliceSites: + if sj in knownSpliceSites and uniqSpliceSites[sj] >= recur: + knownSpliceSites_num +=1 +- print 
>>sys.stderr, str(knownSpliceSites_num) + " known splicing junctions" ++ print(str(knownSpliceSites_num) + " known splicing junctions", file=sys.stderr) + known_junc.append(str(knownSpliceSites_num)) + + #for j in uniq_SJ: + #print >>OUT, j + "\t" + str(uniq_SJ[j]) +- print >>OUT, "pdf('junction_saturation.pdf')" +- print >>OUT, "x=c(" + ','.join([str(i) for i in tmp]) + ')' +- print >>OUT, "y=c(" + ','.join(known_junc) + ')' +- print >>OUT, "z=c(" + ','.join(all_junc) + ')' +- print >>OUT, "plot(x,z/1000,xlab='percent of total reads',ylab='Number of splicing junctions (x1000)',type='o',col='blue',ylim=c(%d,%d))" % (int(int(known_junc[0])/1000), int(int(all_junc[-1])/1000)) +- print >>OUT, "points(x,y/1000,type='o',col='red')" +- print >>OUT, 'legend(5,%d, legend=c("All detected junction","Annotated junction"),col=c("blue","red"),lwd=1,pch=1)' % int(int(all_junc[-1])/1000) +- print >>OUT, "dev.off()" ++ print("pdf('junction_saturation.pdf')", file=OUT) ++ print("x=c(" + ','.join([str(i) for i in tmp]) + ')', file=OUT) ++ print("y=c(" + ','.join(known_junc) + ')', file=OUT) ++ print("z=c(" + ','.join(all_junc) + ')', file=OUT) ++ print("plot(x,z/1000,xlab='percent of total reads',ylab='Number of splicing junctions (x1000)',type='o',col='blue',ylim=c(%d,%d))" % (int(int(known_junc[0])/1000), int(int(all_junc[-1])/1000)), file=OUT) ++ print("points(x,y/1000,type='o',col='red')", file=OUT) ++ print('legend(5,%d, legend=c("All detected junction","Annotated junction"),col=c("blue","red"),lwd=1,pch=1)' % int(int(all_junc[-1])/1000), file=OUT) ++ print("dev.off()", file=OUT) + + + def annotate_junction(self,refgene,outfile=None,min_intron=50): +@@ -1941,7 +1941,7 @@ class QCSAM: + out_file = outfile + ".junction.xls" + out_file2 = outfile + ".junction_plot.r" + if refgene is None: +- print >>sys.stderr, "You must provide reference gene model in bed format." 
++ print("You must provide reference gene model in bed format.", file=sys.stderr) + sys.exit(1) + OUT = open(out_file,'w') + ROUT = open(out_file2,'w') +@@ -1955,13 +1955,13 @@ class QCSAM: + known_junc =0 + splicing_events=collections.defaultdict(int) + +- print >>sys.stderr, "\treading reference bed file: ",refgene, " ... ", ++ print("\treading reference bed file: ",refgene, " ... ", end=' ', file=sys.stderr) + for line in open(refgene,'r'): + if line.startswith(('#','track','browser')):continue + # Parse fields from gene tabls + fields = line.split() + if(len(fields)<12): +- print >>sys.stderr, "Invalid bed line (skipped):",line, ++ print("Invalid bed line (skipped):",line, end=' ', file=sys.stderr) + continue + chrom = fields[0].upper() + tx_start = int( fields[1] ) +@@ -1969,19 +1969,19 @@ class QCSAM: + if int(fields[9] ==1): + continue + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + intron_start = exon_ends[:-1] + intron_end=exon_starts[1:] + for i_st,i_end in zip (intron_start, intron_end): + refIntronStarts[chrom][i_st] =i_st + refIntronEnds[chrom][i_end] =i_end +- print >>sys.stderr,"Done" ++ print("Done", file=sys.stderr) + + #reading input SAM file +- print >>sys.stderr, "\tProcessing "+ self.fileName + '...', ++ print("\tProcessing "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -2022,25 +2022,25 @@ class QCSAM: + intron_end = blockStart[1:] + for 
i_st,i_end in zip(intron_st, intron_end): + splicing_events[chrom + ":" + str(i_st) + ":" + str(i_end)] += 1 +- if (refIntronStarts[chrom].has_key(i_st) and refIntronEnds[chrom].has_key(i_end)): ++ if (i_st in refIntronStarts[chrom] and i_end in refIntronEnds[chrom]): + known_junc +=1 #known both +- elif (not refIntronStarts[chrom].has_key(i_st) and not refIntronEnds[chrom].has_key(i_end)): ++ elif (i_st not in refIntronStarts[chrom] and i_end not in refIntronEnds[chrom]): + novel35_junc +=1 + else: + novel3or5_junc +=1 + #self.f.seek(0) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>ROUT, 'pdf("splicing_events_pie.pdf")' +- print >>ROUT, "events=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc)])+ ')' +- print >>ROUT, 'pie(events,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing events",labels=c("partial_novel %d%%","complete_novel %d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)) +- print >>ROUT, "dev.off()" ++ print('pdf("splicing_events_pie.pdf")', file=ROUT) ++ print("events=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc)])+ ')', file=ROUT) ++ print('pie(events,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing events",labels=c("partial_novel %d%%","complete_novel %d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)), file=ROUT) ++ print("dev.off()", file=ROUT) + +- print >>sys.stderr, "\n===================================================================" +- print >>sys.stderr, "Total splicing Events:\t" + str(total_junc) +- print >>sys.stderr, "Known Splicing Events:\t" + str(known_junc) +- print >>sys.stderr, "Partial Novel Splicing Events:\t" + str(novel3or5_junc) +- print >>sys.stderr, "Novel Splicing Events:\t" + 
str(novel35_junc) ++ print("\n===================================================================", file=sys.stderr) ++ print("Total splicing Events:\t" + str(total_junc), file=sys.stderr) ++ print("Known Splicing Events:\t" + str(known_junc), file=sys.stderr) ++ print("Partial Novel Splicing Events:\t" + str(novel3or5_junc), file=sys.stderr) ++ print("Novel Splicing Events:\t" + str(novel35_junc), file=sys.stderr) + + #reset variables + total_junc =0 +@@ -2048,34 +2048,34 @@ class QCSAM: + novel3or5_junc =0 + known_junc =0 + +- print >>OUT, "chrom\tintron_st(0-based)\tintron_end(1-based)\tread_count\tannotation" ++ print("chrom\tintron_st(0-based)\tintron_end(1-based)\tread_count\tannotation", file=OUT) + for i in splicing_events: + total_junc += 1 + (chrom, i_st, i_end) = i.split(":") +- print >>OUT, '\t'.join([chrom.replace("CHR","chr"),i_st,i_end]) + '\t' + str(splicing_events[i]) + '\t', ++ print('\t'.join([chrom.replace("CHR","chr"),i_st,i_end]) + '\t' + str(splicing_events[i]) + '\t', end=' ', file=OUT) + i_st = int(i_st) + i_end = int(i_end) +- if (refIntronStarts[chrom].has_key(i_st) and refIntronEnds[chrom].has_key(i_end)): +- print >>OUT, "annotated" ++ if (i_st in refIntronStarts[chrom] and i_end in refIntronEnds[chrom]): ++ print("annotated", file=OUT) + known_junc +=1 +- elif (not refIntronStarts[chrom].has_key(i_st) and not refIntronEnds[chrom].has_key(i_end)): +- print >>OUT, 'complete_novel' ++ elif (i_st not in refIntronStarts[chrom] and i_end not in refIntronEnds[chrom]): ++ print('complete_novel', file=OUT) + novel35_junc +=1 + else: +- print >>OUT, 'partial_novel' ++ print('partial_novel', file=OUT) + novel3or5_junc +=1 + + +- print >>sys.stderr, "\nTotal splicing Junctions:\t" + str(total_junc) +- print >>sys.stderr, "Known Splicing Junctions:\t" + str(known_junc) +- print >>sys.stderr, "Partial Novel Splicing Junctions:\t" + str(novel3or5_junc) +- print >>sys.stderr, "Novel Splicing Junctions:\t" + str(novel35_junc) +- print >>sys.stderr, 
"\n===================================================================" ++ print("\nTotal splicing Junctions:\t" + str(total_junc), file=sys.stderr) ++ print("Known Splicing Junctions:\t" + str(known_junc), file=sys.stderr) ++ print("Partial Novel Splicing Junctions:\t" + str(novel3or5_junc), file=sys.stderr) ++ print("Novel Splicing Junctions:\t" + str(novel35_junc), file=sys.stderr) ++ print("\n===================================================================", file=sys.stderr) + +- print >>ROUT, 'pdf("splicing_junction_pie.pdf")' +- print >>ROUT, "junction=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc,)])+ ')' +- print >>ROUT, 'pie(junction,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing junctions",labels=c("partial_novel %d%%","complete_novel %d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)) +- print >>ROUT, "dev.off()" ++ print('pdf("splicing_junction_pie.pdf")', file=ROUT) ++ print("junction=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc,)])+ ')', file=ROUT) ++ print('pie(junction,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing junctions",labels=c("partial_novel %d%%","complete_novel %d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)), file=ROUT) ++ print("dev.off()", file=ROUT) + #print >>ROUT, "mat=matrix(c(events,junction),byrow=T,ncol=3)" + #print >>ROUT, 'barplot(mat,beside=T,ylim=c(0,100),names=c("known","partial\nnovel","complete\nnovel"),legend.text=c("splicing events","splicing junction"),ylab="Percent")' + +@@ -2083,7 +2083,7 @@ class QCSAM: + '''calculate mRNA's RPKM value''' + + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing 
gene model\n", file=sys.stderr) + exit(0) + if outfile is None: + rpkm_file = self.fileName + ".RPKM.xls" +@@ -2101,7 +2101,7 @@ class QCSAM: + RPKM_head=['chr','start','end','name','score','strand','length','rawCount','RPKM'] + + #read SAM +- print >>sys.stderr, "Reading "+ self.fileName + '...', ++ print("Reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -2131,10 +2131,10 @@ class QCSAM: + else: + ranges[chrom].add_interval( Interval( mid, mid ) ) + +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + +- print >>sys.stderr, "Calculating RPKM ...", ++ print("Calculating RPKM ...", end=' ', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith(('#','track','browser')):continue +@@ -2145,14 +2145,14 @@ class QCSAM: + tx_end = int( fields[2] ) + geneName = fields[3] + strand = fields[5].replace(" ","_") +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends) +- exon_sizes = map(int,fields[10].rstrip(',\n').split(',')) ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)) ++ exon_sizes = list(map(int,fields[10].rstrip(',\n').split(','))) + key='\t'.join((chrom.lower(),str(tx_start),str(tx_end),geneName,'0',strand)) + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, file=sys.stderr) + continue + mRNA_count=0 #we need to initializ it to 0 for each gene + mRNA_len=sum(exon_sizes) 
+@@ -2164,14 +2164,14 @@ class QCSAM: + mRNAlen_table[key] = mRNA_len + RPKM_table[key] = str(mRNA_RPKM) + rawCount_table[key] = str(mRNA_count) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>RPKM_OUT, '\t'.join(RPKM_head) ++ print('\t'.join(RPKM_head), file=RPKM_OUT) + for k in RPKM_table: +- print >>RPKM_OUT, k + '\t', +- print >>RPKM_OUT, str(mRNAlen_table[k]) + '\t', +- print >>RPKM_OUT, str(rawCount_table[k]) + '\t', +- print >>RPKM_OUT, str(RPKM_table[k]) + '\t' ++ print(k + '\t', end=' ', file=RPKM_OUT) ++ print(str(mRNAlen_table[k]) + '\t', end=' ', file=RPKM_OUT) ++ print(str(rawCount_table[k]) + '\t', end=' ', file=RPKM_OUT) ++ print(str(RPKM_table[k]) + '\t', file=RPKM_OUT) + return RPKM_table + self.f.seek(0) + +@@ -2180,10 +2180,10 @@ class QCSAM: + use the parental gene as standard, for spliced read, use the splicing motif as strandard''' + + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + if genome is None: +- print >>sys.stderr,"You must specify genome sequence in fasta format\n" ++ print("You must specify genome sequence in fasta format\n", file=sys.stderr) + exit(0) + + if outfile is None: +@@ -2191,19 +2191,19 @@ class QCSAM: + else: + strand_file = outfile + ".strand.infor" + OUT = open(strand_file,'w') +- print >>OUT,"read_type\tread_id\tread_seq\tchr\tStart\tCigar\tprotocol_strand\tgene_strand" ++ print("read_type\tread_id\tread_seq\tchr\tStart\tCigar\tprotocol_strand\tgene_strand", file=OUT) + + transtab = string.maketrans("ACGTNX","TGCANX") + motif=sp.upper().split(',') + motif_rev = [m.translate(transtab)[::-1] for m in motif] + + #load genome +- print >>sys.stderr, "\tloading "+genome+'...' 
++ print("\tloading "+genome+'...', file=sys.stderr) + tmp=fasta.Fasta(genome) + + #load reference gene model + gene_ranges={} +- print >>sys.stderr, "reading reference gene model ...", ++ print("reading reference gene model ...", end=' ', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith(('#','track','browser')):continue +@@ -2215,12 +2215,12 @@ class QCSAM: + geneName = fields[3] + strand = fields[5] + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, file=sys.stderr) + continue + if chrom not in gene_ranges: + gene_ranges[chrom]=Intersecter() + gene_ranges[chrom].insert(tx_start,tx_end,strand) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + #read SAM + +@@ -2228,7 +2228,7 @@ class QCSAM: + strand_from_protocol = 'unknown' + strand_from_gene='unknown' + strand_stat=collections.defaultdict(int) +- print >>sys.stderr, "Reading "+ self.fileName + '...', ++ print("Reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -2258,9 +2258,9 @@ class QCSAM: + else: + strand_from_gene="intergenic" + +- print >>OUT,read_type + '\t' + fields[0] + '\t' + fields[9] + '\t' + fields[2] + '\t' + fields[3] + '\t' + fields[5] +'\t', +- print >>OUT,strand_from_protocol + '\t' + strand_from_gene +- strand_stat[read_type + '\t' + strand_from_protocol +'\t' + strand_from_gene] +=1 ++ print(read_type + '\t' + fields[0] + '\t' + fields[9] + '\t' + fields[2] + '\t' + fields[3] + '\t' + fields[5] +'\t', end=' ', file=OUT) ++ print(strand_from_protocol + '\t' + strand_from_gene, file=OUT) ++ strand_stat[read_type + '\t' + strand_from_protocol +'\t' + strand_from_gene] +=1 + + + #for spliced read +@@ -2273,14 +2273,14 @@ class QCSAM: + blockStart.append(readStart + sum(comb[:i]) ) + for i in range(0,len(comb),2): + 
blockSize.append(comb[i]) +- blockEnd=map((lambda x,y:x+y),blockStart,blockSize) ++ blockEnd=list(map((lambda x,y:x+y),blockStart,blockSize)) + intron_start=blockEnd[:-1] + intron_end=blockStart[1:] + for st,end in zip(intron_start,intron_end): + try: + splice_motif = str(tmp.fetchSeq(chrom, st, st+2)) + str(tmp.fetchSeq(chrom, end-2,end)) + except: +- print line ++ print(line) + if splice_motif in motif: + splice_strand.append('+') + elif splice_motif in motif_rev: +@@ -2293,16 +2293,16 @@ class QCSAM: + strand_from_splice = 'unknown motif' + else: + strand_from_splice = set(splice_strand).pop() +- print >>OUT,read_type + '\t' + fields[0] + '\t' + fields[9] + '\t' + fields[2] + '\t' + fields[3] + '\t' + fields[5] +'\t', +- print >>OUT,strand_from_protocol + '\t' + strand_from_splice ++ print(read_type + '\t' + fields[0] + '\t' + fields[9] + '\t' + fields[2] + '\t' + fields[3] + '\t' + fields[5] +'\t', end=' ', file=OUT) ++ print(strand_from_protocol + '\t' + strand_from_splice, file=OUT) + + strand_stat[read_type + '\t' + strand_from_protocol +'\t' + strand_from_splice] +=1 + +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print "read_type\tstrand_expected\tstrand_observed\tcount" ++ print("read_type\tstrand_expected\tstrand_observed\tcount") + for i in sorted(strand_stat): +- print str(i) +'\t' + str(strand_stat[i]) ++ print(str(i) +'\t' + str(strand_stat[i])) + + def clipping_profile(self,outfile=None): + '''calculate profile of soft clipping''' +@@ -2315,7 +2315,7 @@ class QCSAM: + + OUT=open(out_file1,'w') + ROUT=open(out_file2,'w') +- print >>OUT, "Position\tRead_Total\tRead_clipped" ++ print("Position\tRead_Total\tRead_clipped", file=OUT) + soft_p = re.compile(r'(.*?)(\d+)S') + read_part = re.compile(r'(\d+)[MIS=X]') + total_read =0 +@@ -2324,7 +2324,7 @@ class QCSAM: + + read_pos=[] + clip_count=[] +- print >>sys.stderr, "Reading "+ self.fileName + '...' 
++ print("Reading "+ self.fileName + '...', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -2349,12 +2349,12 @@ class QCSAM: + for i in soft_clip_profile: + read_pos.append(str(i)) + clip_count.append(str(soft_clip_profile[i])) +- print >>OUT, str(i) + '\t' + str(total_read) + '\t' + str(soft_clip_profile[i]) +- print >>ROUT, "pdf('clipping_profile.pdf')" +- print >>ROUT, "read_pos=c(" + ','.join(read_pos) + ')' +- print >>ROUT, "count=c(" + ','.join(clip_count) + ')' +- print >>ROUT, 'plot(read_pos,1-(count/%d),col="blue",main="clipping profile",xlab="Position of reads",ylab="Mappability",type="b")' % total_read +- print >>ROUT, "dev.off()" ++ print(str(i) + '\t' + str(total_read) + '\t' + str(soft_clip_profile[i]), file=OUT) ++ print("pdf('clipping_profile.pdf')", file=ROUT) ++ print("read_pos=c(" + ','.join(read_pos) + ')', file=ROUT) ++ print("count=c(" + ','.join(clip_count) + ')', file=ROUT) ++ print('plot(read_pos,1-(count/%d),col="blue",main="clipping profile",xlab="Position of reads",ylab="Mappability",type="b")' % total_read, file=ROUT) ++ print("dev.off()", file=ROUT) + + def insertion_profile(self,read_len,outfile=None): + '''calculate profile of insertion (insertion means insertion to the reference)''' +@@ -2367,13 +2367,13 @@ class QCSAM: + + OUT=open(out_file1,'w') + ROUT=open(out_file2,'w') +- print >>OUT, "Position\tRead_Total\tRead_clipped" ++ print("Position\tRead_Total\tRead_clipped", file=OUT) + soft_p = re.compile(r'(.*?)(\d+)I') + read_part = re.compile(r'(\d+)[MIS=X]') + total_read =0 + skip_part_of_read =0 + soft_clip_profile=collections.defaultdict(int) +- print >>sys.stderr, "Reading "+ self.fileName + '...', ++ print("Reading "+ self.fileName + '...', end=' ', file=sys.stderr) + for line in self.f: + if line.startswith("@"):continue + fields=line.rstrip('\n ').split() +@@ -2396,7 +2396,7 @@ class QCSAM: + soft_clip_profile[n]+=1 + skip_part_of_read += int(j[1]) + for i in 
range(0,read_len): +- print >>OUT, str(i) + '\t' + str(total_read) + '\t' + str(soft_clip_profile[i]) ++ print(str(i) + '\t' + str(total_read) + '\t' + str(soft_clip_profile[i]), file=OUT) + + class ParseBAM: + '''This class provides fuctions to parsing/processing/transforming SAM or BAM files. The input +@@ -2408,13 +2408,13 @@ class ParseBAM: + try: + self.samfile = pysam.Samfile(inputFile,'rb') + if len(self.samfile.header) ==0: +- print >>sys.stderr, "BAM/SAM file has no header section. Exit!" ++ print("BAM/SAM file has no header section. Exit!", file=sys.stderr) + sys.exit(1) + self.bam_format = True + except: + self.samfile = pysam.Samfile(inputFile,'r') + if len(self.samfile.header) ==0: +- print >>sys.stderr, "BAM/SAM file has no header section. Exit!" ++ print("BAM/SAM file has no header section. Exit!", file=sys.stderr) + sys.exit(1) + self.bam_format = False + +@@ -2437,13 +2437,13 @@ class ParseBAM: + R_splice=0 + R_properPair =0 + +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + + try: + while(1): + flag=0 +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + R_total +=1 + if aligned_read.is_qcfail: #skip QC fail read + R_qc_fail +=1 +@@ -2487,26 +2487,26 @@ class ParseBAM: + R_properPair +=1 + + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + #self.samfile.seek(current_pos) + +- print >>sys.stderr,"\n#==================================================" +- print >>sys.stderr, "%-30s%d" % ("Total Reads (Records):",R_total) +- print >>sys.stderr, "\n", +- print >>sys.stderr, "%-30s%d" % ("QC failed:",R_qc_fail) +- print >>sys.stderr, "%-30s%d" % ("Optical/PCR duplicate:", R_duplicate) +- print >>sys.stderr, "%-30s%d" % ("Non Primary Hits", R_nonprimary) +- print >>sys.stderr, "%-30s%d" % ("Unmapped reads:",R_unmap) +- print >>sys.stderr, "%-30s%d" % ("Multiple mapped reads:",R_multipleHit) +- print >>sys.stderr, "\n", +- print >>sys.stderr, "%-30s%d" % ("Uniquely mapped:",R_uniqHit) +- print >>sys.stderr, "%-30s%d" % ("Read-1:",R_read1) +- print >>sys.stderr, "%-30s%d" % ("Read-2:",R_read2) +- print >>sys.stderr, "%-30s%d" % ("Reads map to '+':",R_forward) +- print >>sys.stderr, "%-30s%d" % ("Reads map to '-':",R_reverse) +- print >>sys.stderr, "%-30s%d" % ("Non-splice reads:",R_nonSplice) +- print >>sys.stderr, "%-30s%d" % ("Splice reads:",R_splice) +- print >>sys.stderr, "%-30s%d" % ("Reads mapped in proper pairs:",R_properPair) ++ print("\n#==================================================", file=sys.stderr) ++ print("%-30s%d" % ("Total Reads (Records):",R_total), file=sys.stderr) ++ print("\n", end=' ', file=sys.stderr) ++ print("%-30s%d" % ("QC failed:",R_qc_fail), file=sys.stderr) ++ print("%-30s%d" % ("Optical/PCR duplicate:", R_duplicate), file=sys.stderr) ++ print("%-30s%d" % ("Non Primary Hits", R_nonprimary), file=sys.stderr) ++ print("%-30s%d" % ("Unmapped reads:",R_unmap), file=sys.stderr) ++ print("%-30s%d" % ("Multiple mapped 
reads:",R_multipleHit), file=sys.stderr) ++ print("\n", end=' ', file=sys.stderr) ++ print("%-30s%d" % ("Uniquely mapped:",R_uniqHit), file=sys.stderr) ++ print("%-30s%d" % ("Read-1:",R_read1), file=sys.stderr) ++ print("%-30s%d" % ("Read-2:",R_read2), file=sys.stderr) ++ print("%-30s%d" % ("Reads map to '+':",R_forward), file=sys.stderr) ++ print("%-30s%d" % ("Reads map to '-':",R_reverse), file=sys.stderr) ++ print("%-30s%d" % ("Non-splice reads:",R_nonSplice), file=sys.stderr) ++ print("%-30s%d" % ("Splice reads:",R_splice), file=sys.stderr) ++ print("%-30s%d" % ("Reads mapped in proper pairs:",R_properPair), file=sys.stderr) + + def configure_experiment(self,refbed,sample_size = 200000): + '''Given a BAM/SAM file, this function will try to guess the RNA-seq experiment: +@@ -2521,7 +2521,7 @@ class ParseBAM: + s_strandness=collections.defaultdict(int) + #load reference gene model + gene_ranges={} +- print >>sys.stderr, "Reading reference gene model " + refbed + ' ...', ++ print("Reading reference gene model " + refbed + ' ...', end=' ', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith(('#','track','browser')):continue +@@ -2533,22 +2533,22 @@ class ParseBAM: + geneName = fields[3] + strand = fields[5] + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, file=sys.stderr) + continue + if chrom not in gene_ranges: + gene_ranges[chrom]=Intersecter() + gene_ranges[chrom].insert(tx_start,tx_end,strand) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + #read SAM/BAM file + #current_pos = self.samfile.tell() +- print >>sys.stderr, "Loading SAM/BAM file ... ", ++ print("Loading SAM/BAM file ... 
", end=' ', file=sys.stderr) + try: + while(1): + if count >= sample_size: + break + flag=0 +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_qcfail: #skip low quanlity + continue + if aligned_read.is_duplicate: #skip duplicate read +@@ -2596,10 +2596,10 @@ class ParseBAM: + count += 1 + + except StopIteration: +- print >>sys.stderr, "Finished" ++ print("Finished", file=sys.stderr) + #self.samfile.seek(current_pos) + +- print >>sys.stderr, "Total " + str(count) + " usable reads were sampled" ++ print("Total " + str(count) + " usable reads were sampled", file=sys.stderr) + protocol="unknown" + strandness=None + spec1=0.0 +@@ -2640,7 +2640,7 @@ class ParseBAM: + elif len(strand_rule.split(',')) ==2: #singeEnd, strand-specific + for i in strand_rule.split(','):strandRule[i[0]]=i[1] + else: +- print >>sys.stderr, "Unknown value of option :'strand_rule' " + strand_rule ++ print("Unknown value of option :'strand_rule' " + strand_rule, file=sys.stderr) + sys.exit(1) + if len(strandRule) == 0: + FWO = open(outfile + '.wig','w') +@@ -2650,13 +2650,13 @@ class ParseBAM: + + + read_id='' +- for chr_name, chr_size in chrom_sizes.items(): #iterate each chrom ++ for chr_name, chr_size in list(chrom_sizes.items()): #iterate each chrom + try: + self.samfile.fetch(chr_name,0,chr_size) + except: +- print >>sys.stderr, "No alignments for " + chr_name + '. skipped' ++ print("No alignments for " + chr_name + '. skipped', file=sys.stderr) + continue +- print >>sys.stderr, "Processing " + chr_name + " ..." ++ print("Processing " + chr_name + " ...", file=sys.stderr) + if len(strandRule) == 0: FWO.write('variableStep chrom='+chr_name+'\n') + else: + FWO.write('variableStep chrom='+chr_name+'\n') +@@ -2699,12 +2699,12 @@ class ParseBAM: + + if len(strandRule) == 0: #this is NOT strand specific. 
+ for pos in sorted (Fwig.keys()): +- print >>FWO, "%d\t%.2f" % (pos,Fwig[pos]) ++ print("%d\t%.2f" % (pos,Fwig[pos]), file=FWO) + else: + for pos in sorted (Fwig.keys()): +- print >>FWO, "%d\t%.2f" % (pos,Fwig[pos]) ++ print("%d\t%.2f" % (pos,Fwig[pos]), file=FWO) + for pos in sorted (Rwig.keys()): +- print >>RVO, "%d\t%.2f" % (pos,Rwig[pos]) ++ print("%d\t%.2f" % (pos,Rwig[pos]), file=RVO) + + + def calculate_rpkm(self,geneFile,outfile,strand_rule=None): +@@ -2732,7 +2732,7 @@ class ParseBAM: + elif len(strand_rule.split(',')) ==2: #singeEnd, strand-specific + for i in strand_rule.split(','):strandRule[i[0]]=i[1] + else: +- print >>sys.stderr, "Unknown value of option :'strand_rule' " + strand_rule ++ print("Unknown value of option :'strand_rule' " + strand_rule, file=sys.stderr) + sys.exit(1) + + uniq_read=0 +@@ -2744,14 +2744,14 @@ class ParseBAM: + rpkm_value={} + + RPKM_OUT = open(outfile,'w') +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + + #current_pos = self.samfile.tell() + try: + while(1): + flag=0 +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_qcfail:continue #skip low quanlity + if aligned_read.is_duplicate:continue #skip duplicate read + if aligned_read.is_secondary:continue #skip non primary hit +@@ -2801,11 +2801,11 @@ class ParseBAM: + else:unstrand_ranges[chrom].add_interval( Interval( mid,mid+1 ) ) + + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + #self.samfile.seek(current_pos) +- print >>RPKM_OUT, "#Total uniquely mapped reads = " + str(uniq_read) +- print >>RPKM_OUT, "#Total fragments = " + str(total_tags) +- print >>sys.stderr, "Assign reads to "+ geneFile + '...', ++ print("#Total uniquely mapped reads = " + str(uniq_read), file=RPKM_OUT) ++ print("#Total fragments = " + str(total_tags), file=RPKM_OUT) ++ print("Assign reads to "+ geneFile + '...', end=' ', file=sys.stderr) + for line in open(geneFile,'r'): + try: + if line.startswith('#'):continue +@@ -2819,16 +2819,16 @@ class ParseBAM: + geneName = fields[3] + strand = fields[5].replace(" ","_") + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends) +- exon_sizes = map(int,fields[10].rstrip(',\n').split(',')) ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)) ++ exon_sizes = list(map(int,fields[10].rstrip(',\n').split(','))) + intron_starts = exon_ends[:-1] + intron_ends=exon_starts[1:] + key='\t'.join((chrom.lower(),str(tx_start),str(tx_end),geneName,'0',strand)) + 
except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + + +@@ -2892,7 +2892,7 @@ class ParseBAM: + RPKM_OUT.write(chrom.lower() + "\t" + str(tx_start) + "\t" + str(tx_end) + "\t" + geneName + "_mRNA" + "\t" + str(mRNA_count) + "\t" + strand + '\t' + str(mRNA_count*1000000000.0/(mRNA_len*total_tags)) +'\n') + except: + RPKM_OUT.write(chrom.lower() + "\t" + str(tx_start) + "\t" + str(tx_end) + "\t" + geneName + "_mRNA" + "\t" + str(0) + "\t" + strand + '\t' + str(0) +'\n') +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + def readsNVC(self,outfile=None,nx=True): + '''for each read, calculate nucleotide frequency vs position''' +@@ -2914,12 +2914,12 @@ class ParseBAM: + t_count=[] + n_count=[] + x_count=[] +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... ", end=' ', file=sys.stderr) + + try: + while(1): +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + #if aligned_read.is_unmapped:continue #skip unmapped read + #if aligned_read.is_qcfail:continue #skip low quality + RNA_read = aligned_read.seq.upper() +@@ -2929,62 +2929,62 @@ class ParseBAM: + key = str(i) + j + base_freq[key] += 1 + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>sys.stderr, "generating data matrix ..." 
+- print >>FO, "Position\tA\tC\tG\tT\tN\tX" +- for i in xrange(len(RNA_read)): +- print >>FO, str(i) + '\t', +- print >>FO, str(base_freq[str(i) + "A"]) + '\t', ++ print("generating data matrix ...", file=sys.stderr) ++ print("Position\tA\tC\tG\tT\tN\tX", file=FO) ++ for i in range(len(RNA_read)): ++ print(str(i) + '\t', end=' ', file=FO) ++ print(str(base_freq[str(i) + "A"]) + '\t', end=' ', file=FO) + a_count.append(str(base_freq[str(i) + "A"])) +- print >>FO, str(base_freq[str(i) + "C"]) + '\t', ++ print(str(base_freq[str(i) + "C"]) + '\t', end=' ', file=FO) + c_count.append(str(base_freq[str(i) + "C"])) +- print >>FO, str(base_freq[str(i) + "G"]) + '\t', ++ print(str(base_freq[str(i) + "G"]) + '\t', end=' ', file=FO) + g_count.append(str(base_freq[str(i) + "G"])) +- print >>FO, str(base_freq[str(i) + "T"]) + '\t', ++ print(str(base_freq[str(i) + "T"]) + '\t', end=' ', file=FO) + t_count.append(str(base_freq[str(i) + "T"])) +- print >>FO, str(base_freq[str(i) + "N"]) + '\t', ++ print(str(base_freq[str(i) + "N"]) + '\t', end=' ', file=FO) + n_count.append(str(base_freq[str(i) + "N"])) +- print >>FO, str(base_freq[str(i) + "X"]) + '\t' ++ print(str(base_freq[str(i) + "X"]) + '\t', file=FO) + x_count.append(str(base_freq[str(i) + "X"])) + FO.close() + + #generating R scripts +- print >>sys.stderr, "generating R script ..." 
+- print >>RS, "position=c(" + ','.join([str(i) for i in xrange(len(RNA_read))]) + ')' +- print >>RS, "A_count=c(" + ','.join(a_count) + ')' +- print >>RS, "C_count=c(" + ','.join(c_count) + ')' +- print >>RS, "G_count=c(" + ','.join(g_count) + ')' +- print >>RS, "T_count=c(" + ','.join(t_count) + ')' +- print >>RS, "N_count=c(" + ','.join(n_count) + ')' +- print >>RS, "X_count=c(" + ','.join(x_count) + ')' ++ print("generating R script ...", file=sys.stderr) ++ print("position=c(" + ','.join([str(i) for i in range(len(RNA_read))]) + ')', file=RS) ++ print("A_count=c(" + ','.join(a_count) + ')', file=RS) ++ print("C_count=c(" + ','.join(c_count) + ')', file=RS) ++ print("G_count=c(" + ','.join(g_count) + ')', file=RS) ++ print("T_count=c(" + ','.join(t_count) + ')', file=RS) ++ print("N_count=c(" + ','.join(n_count) + ')', file=RS) ++ print("X_count=c(" + ','.join(x_count) + ')', file=RS) + + if nx: +- print >>RS, "total= A_count + C_count + G_count + T_count + N_count + X_count" +- print >>RS, "ym=max(A_count/total,C_count/total,G_count/total,T_count/total,N_count/total,X_count/total) + 0.05" +- print >>RS, "yn=min(A_count/total,C_count/total,G_count/total,T_count/total,N_count/total,X_count/total)" ++ print("total= A_count + C_count + G_count + T_count + N_count + X_count", file=RS) ++ print("ym=max(A_count/total,C_count/total,G_count/total,T_count/total,N_count/total,X_count/total) + 0.05", file=RS) ++ print("yn=min(A_count/total,C_count/total,G_count/total,T_count/total,N_count/total,X_count/total)", file=RS) + +- print >>RS, 'pdf(\"%s\")' % (outfile +".NVC_plot.pdf") +- print >>RS, 'plot(position,A_count/total,type="o",pch=20,ylim=c(yn,ym),col="dark green",xlab="Position of Read",ylab="Nucleotide Frequency")' +- print >>RS, 'lines(position,T_count/total,type="o",pch=20,col="red")' +- print >>RS, 'lines(position,G_count/total,type="o",pch=20,col="blue")' +- print >>RS, 'lines(position,C_count/total,type="o",pch=20,col="cyan")' +- print >>RS, 
'lines(position,N_count/total,type="o",pch=20,col="black")' +- print >>RS, 'lines(position,X_count/total,type="o",pch=20,col="grey")' +- print >>RS, 'legend('+ str(len(RNA_read)-10) + ',ym,legend=c("A","T","G","C","N","X"),col=c("dark green","red","blue","cyan","black","grey"),lwd=2,pch=20,text.col=c("dark green","red","blue","cyan","black","grey"))' +- print >>RS, "dev.off()" ++ print('pdf(\"%s\")' % (outfile +".NVC_plot.pdf"), file=RS) ++ print('plot(position,A_count/total,type="o",pch=20,ylim=c(yn,ym),col="dark green",xlab="Position of Read",ylab="Nucleotide Frequency")', file=RS) ++ print('lines(position,T_count/total,type="o",pch=20,col="red")', file=RS) ++ print('lines(position,G_count/total,type="o",pch=20,col="blue")', file=RS) ++ print('lines(position,C_count/total,type="o",pch=20,col="cyan")', file=RS) ++ print('lines(position,N_count/total,type="o",pch=20,col="black")', file=RS) ++ print('lines(position,X_count/total,type="o",pch=20,col="grey")', file=RS) ++ print('legend('+ str(len(RNA_read)-10) + ',ym,legend=c("A","T","G","C","N","X"),col=c("dark green","red","blue","cyan","black","grey"),lwd=2,pch=20,text.col=c("dark green","red","blue","cyan","black","grey"))', file=RS) ++ print("dev.off()", file=RS) + else: +- print >>RS, "total= A_count + C_count + G_count + T_count" +- print >>RS, "ym=max(A_count/total,C_count/total,G_count/total,T_count/total) + 0.05" +- print >>RS, "yn=min(A_count/total,C_count/total,G_count/total,T_count/total)" ++ print("total= A_count + C_count + G_count + T_count", file=RS) ++ print("ym=max(A_count/total,C_count/total,G_count/total,T_count/total) + 0.05", file=RS) ++ print("yn=min(A_count/total,C_count/total,G_count/total,T_count/total)", file=RS) + +- print >>RS, 'pdf(\"%s\")' % (outfile +".NVC_plot.pdf") +- print >>RS, 'plot(position,A_count/total,type="o",pch=20,ylim=c(yn,ym),col="dark green",xlab="Position of Read",ylab="Nucleotide Frequency")' +- print >>RS, 'lines(position,T_count/total,type="o",pch=20,col="red")' +- 
print >>RS, 'lines(position,G_count/total,type="o",pch=20,col="blue")' +- print >>RS, 'lines(position,C_count/total,type="o",pch=20,col="cyan")' +- print >>RS, 'legend('+ str(len(RNA_read)-10) + ',ym,legend=c("A","T","G","C"),col=c("dark green","red","blue","cyan"),lwd=2,pch=20,text.col=c("dark green","red","blue","cyan"))' +- print >>RS, "dev.off()" ++ print('pdf(\"%s\")' % (outfile +".NVC_plot.pdf"), file=RS) ++ print('plot(position,A_count/total,type="o",pch=20,ylim=c(yn,ym),col="dark green",xlab="Position of Read",ylab="Nucleotide Frequency")', file=RS) ++ print('lines(position,T_count/total,type="o",pch=20,col="red")', file=RS) ++ print('lines(position,G_count/total,type="o",pch=20,col="blue")', file=RS) ++ print('lines(position,C_count/total,type="o",pch=20,col="cyan")', file=RS) ++ print('legend('+ str(len(RNA_read)-10) + ',ym,legend=c("A","T","G","C"),col=c("dark green","red","blue","cyan"),lwd=2,pch=20,text.col=c("dark green","red","blue","cyan"))', file=RS) ++ print("dev.off()", file=RS) + + RS.close() + #self.f.seek(0) +@@ -2995,8 +2995,8 @@ class ParseBAM: + output = outfile + ".qual.r" + FO=open(output,'w') + +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + + quality = collections.defaultdict(dict) #read_pos=>quality score=>count + q_max = -1 +@@ -3005,7 +3005,7 @@ class ParseBAM: + i_box={} #key is read postion,value is + try: + while(1): +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + + #if aligned_read.is_unmapped:continue #skip unmapped read + #if aligned_read.is_qcfail:continue #skip low quality +@@ -3024,14 +3024,14 @@ class ParseBAM: + except: + quality[i][q] = 1 + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + for p in range(0,read_len): + #print str(p) + ':', + val=[] + occurrence=[] + for q in range(q_min,q_max+1): +- if quality.has_key(p) and quality[p].has_key(q): ++ if p in quality and q in quality[p]: + val.append(str(q)) + occurrence.append(str(quality[p][q])) + q_list.append(str(quality[p][q])) +@@ -3041,21 +3041,21 @@ class ParseBAM: + + + #generate R script for boxplot +- print >>FO, "pdf(\'%s\')" % (outfile + ".qual.boxplot.pdf") ++ print("pdf(\'%s\')" % (outfile + ".qual.boxplot.pdf"), file=FO) + for i in sorted(i_box): +- print >>FO,'p'+str(i) + '<-' + i_box[i] +- print >>FO, 'boxplot(' + ','.join(['p'+str(i) for i in i_box]) + ',xlab=\"Position of Read(5\'->3\')\",ylab=\"Phred Quality Score\",outline=F' + ')' +- print >>FO,"dev.off()" ++ print('p'+str(i) + '<-' + i_box[i], file=FO) ++ print('boxplot(' + ','.join(['p'+str(i) for i in i_box]) + ',xlab=\"Position of Read(5\'->3\')\",ylab=\"Phred Quality Score\",outline=F' + ')', file=FO) ++ print("dev.off()", file=FO) + + + #generate R script for heatmap +- print >>FO, '\n' +- print >>FO, "pdf(\'%s\')" % (outfile + ".qual.heatmap.pdf") +- print >>FO, "qual=c(" + ','.join(q_list) + ')' +- print >>FO, "mat=matrix(qual,ncol=%s,byrow=F)" % (read_len) +- print >>FO, 'Lab.palette <- colorRampPalette(c("blue", "orange", "red3","red2","red1","red"), space = "rgb",interpolate=c(\'spline\'))' +- print >>FO, "heatmap(mat,Rowv=NA,Colv=NA,xlab=\"Position of 
Read\",ylab=\"Phred Quality Score\",labRow=seq(from=%s,to=%s),col = Lab.palette(256),scale=\"none\" )" % (q_min,q_max) +- print >>FO, 'dev.off()' ++ print('\n', file=FO) ++ print("pdf(\'%s\')" % (outfile + ".qual.heatmap.pdf"), file=FO) ++ print("qual=c(" + ','.join(q_list) + ')', file=FO) ++ print("mat=matrix(qual,ncol=%s,byrow=F)" % (read_len), file=FO) ++ print('Lab.palette <- colorRampPalette(c("blue", "orange", "red3","red2","red1","red"), space = "rgb",interpolate=c(\'spline\'))', file=FO) ++ print("heatmap(mat,Rowv=NA,Colv=NA,xlab=\"Position of Read\",ylab=\"Phred Quality Score\",labRow=seq(from=%s,to=%s),col = Lab.palette(256),scale=\"none\" )" % (q_min,q_max), file=FO) ++ print('dev.off()', file=FO) + + + def readGC(self,outfile=None): +@@ -3071,12 +3071,12 @@ class ParseBAM: + + gc_hist=collections.defaultdict(int) #key is GC percent, value is count of reads + +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... ", end=' ', file=sys.stderr) + + try: + while(1): +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_unmapped:continue #skip unmapped read + if aligned_read.is_qcfail:continue #skip low quality + RNA_read = aligned_read.seq.upper() +@@ -3084,19 +3084,19 @@ class ParseBAM: + #print gc_percent + gc_hist[gc_percent] += 1 + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>sys.stderr, "writing GC content ..." +- print >>FO, "GC%\tread_count" +- for i in gc_hist.keys(): +- print >>FO, i + '\t' + str(gc_hist[i]) ++ print("writing GC content ...", file=sys.stderr) ++ print("GC%\tread_count", file=FO) ++ for i in list(gc_hist.keys()): ++ print(i + '\t' + str(gc_hist[i]), file=FO) + +- print >>sys.stderr, "writing R script ..." 
+- print >>RS, "pdf(\"%s\")" % (outfile + ".GC_plot.pdf") +- print >>RS, 'gc=rep(c(' + ','.join([i for i in gc_hist.keys()]) + '),' + 'times=c(' + ','.join([str(i) for i in gc_hist.values()]) + '))' +- print >>RS, 'hist(gc,probability=T,breaks=%d,xlab="GC content (%%)",ylab="Density of Reads",border="blue",main="")' % 100 ++ print("writing R script ...", file=sys.stderr) ++ print("pdf(\"%s\")" % (outfile + ".GC_plot.pdf"), file=RS) ++ print('gc=rep(c(' + ','.join([i for i in list(gc_hist.keys())]) + '),' + 'times=c(' + ','.join([str(i) for i in list(gc_hist.values())]) + '))', file=RS) ++ print('hist(gc,probability=T,breaks=%d,xlab="GC content (%%)",ylab="Density of Reads",border="blue",main="")' % 100, file=RS) + #print >>RS, "lines(density(gc),col='red')" +- print >>RS ,"dev.off()" ++ print("dev.off()", file=RS) + #self.f.seek(0) + + +@@ -3120,13 +3120,13 @@ class ParseBAM: + seqDup_count=collections.defaultdict(int) + posDup_count=collections.defaultdict(int) + +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... ", end=' ', file=sys.stderr) + + try: + while(1): + exon_boundary="" +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_unmapped:continue #skip unmapped read + if aligned_read.is_qcfail:continue #skip low quality + RNA_read = aligned_read.seq.upper() +@@ -3142,40 +3142,40 @@ class ParseBAM: + posDup[key] +=1 + + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>sys.stderr, "report duplicte rate based on sequence ..." 
+- print >>SEQ, "Occurrence\tUniqReadNumber" +- for i in seqDup.values(): #key is occurence, value is uniq reads number (based on seq) ++ print("report duplicte rate based on sequence ...", file=sys.stderr) ++ print("Occurrence\tUniqReadNumber", file=SEQ) ++ for i in list(seqDup.values()): #key is occurence, value is uniq reads number (based on seq) + seqDup_count[i] +=1 +- for k in sorted(seqDup_count.iterkeys()): +- print >>SEQ, str(k) +'\t'+ str(seqDup_count[k]) ++ for k in sorted(seqDup_count.keys()): ++ print(str(k) +'\t'+ str(seqDup_count[k]), file=SEQ) + SEQ.close() + +- print >>sys.stderr, "report duplicte rate based on mapping ..." +- print >>POS, "Occurrence\tUniqReadNumber" +- for i in posDup.values(): #key is occurence, value is uniq reads number (based on coord) ++ print("report duplicte rate based on mapping ...", file=sys.stderr) ++ print("Occurrence\tUniqReadNumber", file=POS) ++ for i in list(posDup.values()): #key is occurence, value is uniq reads number (based on coord) + posDup_count[i] +=1 +- for k in sorted(posDup_count.iterkeys()): +- print >>POS, str(k) +'\t'+ str(posDup_count[k]) ++ for k in sorted(posDup_count.keys()): ++ print(str(k) +'\t'+ str(posDup_count[k]), file=POS) + POS.close() + + +- print >>sys.stderr, "generate R script ..." 
+- print >>RS, "pdf(\'%s\')" % (outfile +".DupRate_plot.pdf") +- print >>RS, "par(mar=c(5,4,4,5),las=0)" +- print >>RS, "seq_occ=c(" + ','.join([str(i) for i in sorted(seqDup_count.iterkeys()) ]) + ')' +- print >>RS, "seq_uniqRead=c(" + ','.join([str(seqDup_count[i]) for i in sorted(seqDup_count.iterkeys()) ]) + ')' +- print >>RS, "pos_occ=c(" + ','.join([str(i) for i in sorted(posDup_count.iterkeys()) ]) + ')' +- print >>RS, "pos_uniqRead=c(" + ','.join([str(posDup_count[i]) for i in sorted(posDup_count.iterkeys()) ]) + ')' +- print >>RS, "plot(pos_occ,log10(pos_uniqRead),ylab='Number of Reads (log10)',xlab='Frequency',pch=4,cex=0.8,col='blue',xlim=c(1,%d),yaxt='n')" % up_bound +- print >>RS, "points(seq_occ,log10(seq_uniqRead),pch=20,cex=0.8,col='red')" +- print >>RS, 'ym=floor(max(log10(pos_uniqRead)))' +- print >>RS, "legend(%d,ym,legend=c('Sequence-base','Mapping-base'),col=c('red','blue'),pch=c(4,20))" % max(up_bound-200,1) +- print >>RS, 'axis(side=2,at=0:ym,labels=0:ym)' +- print >>RS, 'axis(side=4,at=c(log10(pos_uniqRead[1]),log10(pos_uniqRead[2]),log10(pos_uniqRead[3]),log10(pos_uniqRead[4])), labels=c(round(pos_uniqRead[1]*100/sum(pos_uniqRead)),round(pos_uniqRead[2]*100/sum(pos_uniqRead)),round(pos_uniqRead[3]*100/sum(pos_uniqRead)),round(pos_uniqRead[4]*100/sum(pos_uniqRead))))' +- print >>RS, 'mtext(4, text = "Reads %", line = 2)' +- print >>RS, 'dev.off()' ++ print("generate R script ...", file=sys.stderr) ++ print("pdf(\'%s\')" % (outfile +".DupRate_plot.pdf"), file=RS) ++ print("par(mar=c(5,4,4,5),las=0)", file=RS) ++ print("seq_occ=c(" + ','.join([str(i) for i in sorted(seqDup_count.keys()) ]) + ')', file=RS) ++ print("seq_uniqRead=c(" + ','.join([str(seqDup_count[i]) for i in sorted(seqDup_count.keys()) ]) + ')', file=RS) ++ print("pos_occ=c(" + ','.join([str(i) for i in sorted(posDup_count.keys()) ]) + ')', file=RS) ++ print("pos_uniqRead=c(" + ','.join([str(posDup_count[i]) for i in sorted(posDup_count.keys()) ]) + ')', file=RS) ++ 
print("plot(pos_occ,log10(pos_uniqRead),ylab='Number of Reads (log10)',xlab='Frequency',pch=4,cex=0.8,col='blue',xlim=c(1,%d),yaxt='n')" % up_bound, file=RS) ++ print("points(seq_occ,log10(seq_uniqRead),pch=20,cex=0.8,col='red')", file=RS) ++ print('ym=floor(max(log10(pos_uniqRead)))', file=RS) ++ print("legend(%d,ym,legend=c('Sequence-base','Mapping-base'),col=c('red','blue'),pch=c(4,20))" % max(up_bound-200,1), file=RS) ++ print('axis(side=2,at=0:ym,labels=0:ym)', file=RS) ++ print('axis(side=4,at=c(log10(pos_uniqRead[1]),log10(pos_uniqRead[2]),log10(pos_uniqRead[3]),log10(pos_uniqRead[4])), labels=c(round(pos_uniqRead[1]*100/sum(pos_uniqRead)),round(pos_uniqRead[2]*100/sum(pos_uniqRead)),round(pos_uniqRead[3]*100/sum(pos_uniqRead)),round(pos_uniqRead[4]*100/sum(pos_uniqRead))))', file=RS) ++ print('mtext(4, text = "Reads %", line = 2)', file=RS) ++ print('dev.off()', file=RS) + #self.f.seek(0) + + def clipping_profile(self,outfile): +@@ -3185,7 +3185,7 @@ class ParseBAM: + + OUT=open(out_file1,'w') + ROUT=open(out_file2,'w') +- print >>OUT, "Position\tRead_Total\tRead_clipped" ++ print("Position\tRead_Total\tRead_clipped", file=OUT) + soft_p = re.compile(r'(.*?)(\d+)S') + read_part = re.compile(r'(\d+)[MIS=X]') + total_read =0 +@@ -3195,13 +3195,13 @@ class ParseBAM: + read_pos=[] + clip_count=[] + +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + + try: + while(1): + exon_boundary="" +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_unmapped:continue #skip unmapped read + if aligned_read.is_qcfail:continue #skip low quality + +@@ -3217,24 +3217,24 @@ class ParseBAM: + soft_clip_profile[n]+=1 + skip_part_of_read += int(j[1]) + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + + for i in soft_clip_profile: + read_pos.append(str(i)) + clip_count.append(str(soft_clip_profile[i])) +- print >>OUT, str(i) + '\t' + str(total_read) + '\t' + str(soft_clip_profile[i]) +- print >>ROUT, "pdf('clipping_profile.pdf')" +- print >>ROUT, "read_pos=c(" + ','.join(read_pos) + ')' +- print >>ROUT, "count=c(" + ','.join(clip_count) + ')' +- print >>ROUT, 'plot(read_pos,1-(count/%d),col="blue",main="clipping profile",xlab="Position of reads",ylab="Mappability",type="b")' % total_read +- print >>ROUT, "dev.off()" ++ print(str(i) + '\t' + str(total_read) + '\t' + str(soft_clip_profile[i]), file=OUT) ++ print("pdf('clipping_profile.pdf')", file=ROUT) ++ print("read_pos=c(" + ','.join(read_pos) + ')', file=ROUT) ++ print("count=c(" + ','.join(clip_count) + ')', file=ROUT) ++ print('plot(read_pos,1-(count/%d),col="blue",main="clipping profile",xlab="Position of reads",ylab="Mappability",type="b")' % total_read, file=ROUT) ++ print("dev.off()", file=ROUT) + + def coverageGeneBody(self,refbed,outfile): + '''Calculate reads coverage over gene body, from 5'to 3'. 
each gene will be equally divided + into 100 regsions''' + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + OUT1 = open(outfile + ".geneBodyCoverage_plot.r",'w') + OUT2 = open(outfile + ".geneBodyCoverage.txt",'w') +@@ -3245,12 +3245,12 @@ class ParseBAM: + rpkm={} + + #read SAM +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... ", end=' ', file=sys.stderr) + + try: + while(1): +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_qcfail:continue #skip low quanlity + if aligned_read.is_duplicate:continue #skip duplicate read + if aligned_read.is_secondary:continue #skip non primary hit +@@ -3269,9 +3269,9 @@ class ParseBAM: + else: + ranges[chrom].add_interval( Interval( exon[1], exon[2] ) ) + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>sys.stderr, "calculating coverage over gene body ..." 
++ print("calculating coverage over gene body ...", file=sys.stderr) + coverage=collections.defaultdict(int) + flag=0 + for line in open(refbed,'r'): +@@ -3285,19 +3285,19 @@ class ParseBAM: + geneName = fields[3] + strand = fields[5] + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line, ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, end=' ', file=sys.stderr) + continue + gene_all_base=[] + percentile_base=[] + mRNA_len =0 + flag=0 + for st,end in zip(exon_starts,exon_ends): +- gene_all_base.extend(range(st+1,end+1)) #0-based coordinates on genome ++ gene_all_base.extend(list(range(st+1,end+1))) #0-based coordinates on genome + mRNA_len = len(gene_all_base) + if mRNA_len <100: + flag=1 +@@ -3314,18 +3314,18 @@ class ParseBAM: + coverage[i] += len(ranges[chrom].find(percentile_base[i], percentile_base[i]+1)) + x_coord=[] + y_coord=[] +- print >>OUT2, "Total reads: " + str(totalReads) +- print >>OUT2, "Fragment number: " + str(fragment_num) +- print >>OUT2, "percentile\tcount" ++ print("Total reads: " + str(totalReads), file=OUT2) ++ print("Fragment number: " + str(fragment_num), file=OUT2) ++ print("percentile\tcount", file=OUT2) + for i in coverage: + x_coord.append(str(i)) + y_coord.append(str(coverage[i])) +- print >>OUT2, str(i) + '\t' + str(coverage[i]) +- print >>OUT1, "pdf(\'%s\')" % (outfile + ".geneBodyCoverage.pdf") +- print >>OUT1, "x=0:100" 
+- print >>OUT1, "y=c(" + ','.join(y_coord) + ')' +- print >>OUT1, "plot(x,y,xlab=\"percentile of gene body (5'->3')\",ylab='read number',type='s')" +- print >>OUT1, "dev.off()" ++ print(str(i) + '\t' + str(coverage[i]), file=OUT2) ++ print("pdf(\'%s\')" % (outfile + ".geneBodyCoverage.pdf"), file=OUT1) ++ print("x=0:100", file=OUT1) ++ print("y=c(" + ','.join(y_coord) + ')', file=OUT1) ++ print("plot(x,y,xlab=\"percentile of gene body (5'->3')\",ylab='read number',type='s')", file=OUT1) ++ print("dev.off()", file=OUT1) + + def mRNA_inner_distance(self,outfile,refbed,low_bound=0,up_bound=1000,step=10): + '''estimate the inner distance of mRNA pair end fragment. fragment size = insert_size + 2 x read_length''' +@@ -3342,33 +3342,33 @@ class ParseBAM: + ranges={} + ranges[fchrom]=Intersecter() + +- window_left_bound = range(low_bound,up_bound,step) +- frag_size=0 ++ window_left_bound = list(range(low_bound,up_bound,step)) ++ frag_size=0 + + inner_distance_bitsets=BinnedBitSet() + tmp = BinnedBitSet() + tmp.set_range(0,0) +- pair_num=0.0 +- sizes=[] +- counts=[] +- count=0 +- +- print >>sys.stderr, "Get intron regions from " + refbed + " ..." +- bed_obj = BED.ParseBED(refbed) +- ref_exons = [] +- +- for exn in bed_obj.getExon(): +- ref_exons.append([exn[0].upper(), exn[1], exn[2]]) ++ pair_num=0.0 ++ sizes=[] ++ counts=[] ++ count=0 ++ ++ print("Get intron regions from " + refbed + " ...", file=sys.stderr) ++ bed_obj = BED.ParseBED(refbed) ++ ref_exons = [] ++ ++ for exn in bed_obj.getExon(): ++ ref_exons.append([exn[0].upper(), exn[1], exn[2]]) + exon_bitsets = binned_bitsets_from_list(ref_exons) + +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + + try: + while(1): + splice_intron_size=0 + flag=0 +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_qcfail:continue #skip low quanlity + if aligned_read.is_duplicate:continue #skip duplicate read + if aligned_read.is_secondary:continue #skip non primary hit +@@ -3430,28 +3430,28 @@ class ParseBAM: + ranges[fchrom].add_interval( Interval( inner_distance-1, inner_distance ) ) + + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + + if pair_num==0: +- print >>sys.stderr, "Cannot find paired reads" ++ print("Cannot find paired reads", file=sys.stderr) + sys.exit(0) + #print >>FQ, "Total paired read " + str(pair_num) + for st in window_left_bound: + sizes.append(str(st + step/2)) + count = str(len(ranges[fchrom].find(st,st + step))) + counts.append(count) +- print >>FQ, str(st) + '\t' + str(st+step) +'\t' + count ++ print(str(st) + '\t' + str(st+step) +'\t' + count, file=FQ) + +- print >>RS, "pdf(\'%s\')" % (outfile + ".inner_distance_plot.pdf") ++ print("pdf(\'%s\')" % (outfile + ".inner_distance_plot.pdf"), file=RS) + #print >>RS, "par(mfrow=c(2,1),cex.main=0.8,cex.lab=0.8,cex.axis=0.8,mar=c(4,4,4,1))" + #print >>RS, 'pie(c(%d,%d,%d),col=rainbow(3),cex=0.5,radius=1,main="Total %d fragments",labels=c("fraSize <= %d\\n(%4.2f%%)","fragSize > %d\\n(%4.2f%%)","%d < fragSize <= %d\\n(%4.2f%%)"), density=rep(80,80,80),angle=c(90,140,170))' % (ultra_low, ultra_high, pair_num -ultra_low -ultra_high, pair_num, low_bound, ultra_low*100/pair_num, up_bound, ultra_high*100/pair_num, low_bound, up_bound, 100-ultra_low*100/pair_num - ultra_high*100/pair_num) +- print >>RS, 'fragsize=rep(c(' + ','.join(sizes) + '),' + 'times=c(' + ','.join(counts) + '))' +- print >>RS, 'frag_sd = round(sd(fragsize))' +- print >>RS, 'frag_mean = round(mean(fragsize))' +- print >>RS, 'hist(fragsize,probability=T,breaks=%d,xlab="mRNA insert size 
(bp)",main=paste(c("Mean=",frag_mean,";","SD=",frag_sd),collapse=""),border="blue")' % len(window_left_bound) +- print >>RS, "lines(density(fragsize,bw=%d),col='red')" % (2*step) +- print >>RS ,"dev.off()" ++ print('fragsize=rep(c(' + ','.join(sizes) + '),' + 'times=c(' + ','.join(counts) + '))', file=RS) ++ print('frag_sd = round(sd(fragsize))', file=RS) ++ print('frag_mean = round(mean(fragsize))', file=RS) ++ print('hist(fragsize,probability=T,breaks=%d,xlab="mRNA insert size (bp)",main=paste(c("Mean=",frag_mean,";","SD=",frag_sd),collapse=""),border="blue")' % len(window_left_bound), file=RS) ++ print("lines(density(fragsize,bw=%d),col='red')" % (2*step), file=RS) ++ print("dev.off()", file=RS) + FO.close() + FQ.close() + RS.close() +@@ -3465,7 +3465,7 @@ class ParseBAM: + out_file = outfile + ".junction.xls" + out_file2 = outfile + ".junction_plot.r" + if refgene is None: +- print >>sys.stderr, "You must provide reference gene model in bed format." ++ print("You must provide reference gene model in bed format.", file=sys.stderr) + sys.exit(1) + OUT = open(out_file,'w') + ROUT = open(out_file2,'w') +@@ -3479,13 +3479,13 @@ class ParseBAM: + known_junc =0 + splicing_events=collections.defaultdict(int) + +- print >>sys.stderr, "Reading reference bed file: ",refgene, " ... ", ++ print("Reading reference bed file: ",refgene, " ... 
", end=' ', file=sys.stderr) + for line in open(refgene,'r'): + if line.startswith(('#','track','browser')):continue + # Parse fields from gene tabls + fields = line.split() + if(len(fields)<12): +- print >>sys.stderr, "Invalid bed line (skipped):",line, ++ print("Invalid bed line (skipped):",line, end=' ', file=sys.stderr) + continue + chrom = fields[0].upper() + tx_start = int( fields[1] ) +@@ -3493,25 +3493,25 @@ class ParseBAM: + if int(fields[9] ==1): + continue + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + intron_start = exon_ends[:-1] + intron_end=exon_starts[1:] + for i_st,i_end in zip (intron_start, intron_end): + refIntronStarts[chrom][i_st] =i_st + refIntronEnds[chrom][i_end] =i_end +- print >>sys.stderr,"Done" ++ print("Done", file=sys.stderr) + + #reading input SAM file +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + + try: + while(1): + flag=0 +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_qcfail:continue #skip low quanlity + if aligned_read.is_duplicate:continue #skip duplicate read + if aligned_read.is_secondary:continue #skip non primary hit +@@ -3534,28 +3534,28 @@ class ParseBAM: + total_junc +=1 + if intrn[2] - intrn[1] < min_intron:continue + splicing_events[intrn[0] + ":" + str(intrn[1]) + ":" + str(intrn[2])] += 1 +- if (refIntronStarts[chrom].has_key(intrn[1]) and refIntronEnds[chrom].has_key(intrn[2])): ++ if (intrn[1] in refIntronStarts[chrom] and intrn[2] in refIntronEnds[chrom]): + known_junc +=1 #known both +- elif (not refIntronStarts[chrom].has_key(intrn[1]) and not refIntronEnds[chrom].has_key(intrn[2])): ++ elif (intrn[1] not in refIntronStarts[chrom] and intrn[2] not in refIntronEnds[chrom]): + novel35_junc +=1 + else: + novel3or5_junc +=1 + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print "total = " + str(total_junc) ++ print("total = " + str(total_junc)) + #self.f.seek(0) + +- print >>ROUT, 'pdf(\"%s\")' % (outfile + ".junction_plot.pdf") +- print >>ROUT, "events=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc)])+ ')' +- print >>ROUT, 'pie(events,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing events",labels=c("partial_novel %d%%","complete_novel %d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)) +- print >>ROUT, "dev.off()" ++ print('pdf(\"%s\")' % (outfile + ".junction_plot.pdf"), file=ROUT) ++ print("events=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc)])+ ')', file=ROUT) ++ print('pie(events,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing events",labels=c("partial_novel %d%%","complete_novel 
%d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)), file=ROUT) ++ print("dev.off()", file=ROUT) + +- print >>sys.stderr, "\n===================================================================" +- print >>sys.stderr, "Total splicing Events:\t" + str(total_junc) +- print >>sys.stderr, "Known Splicing Events:\t" + str(known_junc) +- print >>sys.stderr, "Partial Novel Splicing Events:\t" + str(novel3or5_junc) +- print >>sys.stderr, "Novel Splicing Events:\t" + str(novel35_junc) ++ print("\n===================================================================", file=sys.stderr) ++ print("Total splicing Events:\t" + str(total_junc), file=sys.stderr) ++ print("Known Splicing Events:\t" + str(known_junc), file=sys.stderr) ++ print("Partial Novel Splicing Events:\t" + str(novel3or5_junc), file=sys.stderr) ++ print("Novel Splicing Events:\t" + str(novel35_junc), file=sys.stderr) + + #reset variables + total_junc =0 +@@ -3563,36 +3563,36 @@ class ParseBAM: + novel3or5_junc =0 + known_junc =0 + +- print >>OUT, "chrom\tintron_st(0-based)\tintron_end(1-based)\tread_count\tannotation" ++ print("chrom\tintron_st(0-based)\tintron_end(1-based)\tread_count\tannotation", file=OUT) + for i in splicing_events: + total_junc += 1 + (chrom, i_st, i_end) = i.split(":") +- print >>OUT, '\t'.join([chrom.replace("CHR","chr"),i_st,i_end]) + '\t' + str(splicing_events[i]) + '\t', ++ print('\t'.join([chrom.replace("CHR","chr"),i_st,i_end]) + '\t' + str(splicing_events[i]) + '\t', end=' ', file=OUT) + i_st = int(i_st) + i_end = int(i_end) +- if (refIntronStarts[chrom].has_key(i_st) and refIntronEnds[chrom].has_key(i_end)): +- print >>OUT, "annotated" ++ if (i_st in refIntronStarts[chrom] and i_end in refIntronEnds[chrom]): ++ print("annotated", file=OUT) + known_junc +=1 +- elif (not refIntronStarts[chrom].has_key(i_st) and not refIntronEnds[chrom].has_key(i_end)): +- print >>OUT, 'complete_novel' ++ elif (i_st not 
in refIntronStarts[chrom] and i_end not in refIntronEnds[chrom]): ++ print('complete_novel', file=OUT) + novel35_junc +=1 + else: +- print >>OUT, 'partial_novel' ++ print('partial_novel', file=OUT) + novel3or5_junc +=1 + + if total_junc ==0: +- print >>sys.stderr, "No splice read found" ++ print("No splice read found", file=sys.stderr) + sys.exit(1) +- print >>sys.stderr, "\nTotal splicing Junctions:\t" + str(total_junc) +- print >>sys.stderr, "Known Splicing Junctions:\t" + str(known_junc) +- print >>sys.stderr, "Partial Novel Splicing Junctions:\t" + str(novel3or5_junc) +- print >>sys.stderr, "Novel Splicing Junctions:\t" + str(novel35_junc) +- print >>sys.stderr, "\n===================================================================" ++ print("\nTotal splicing Junctions:\t" + str(total_junc), file=sys.stderr) ++ print("Known Splicing Junctions:\t" + str(known_junc), file=sys.stderr) ++ print("Partial Novel Splicing Junctions:\t" + str(novel3or5_junc), file=sys.stderr) ++ print("Novel Splicing Junctions:\t" + str(novel35_junc), file=sys.stderr) ++ print("\n===================================================================", file=sys.stderr) + +- print >>ROUT, 'pdf("splicing_junction_pie.pdf")' +- print >>ROUT, "junction=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc,)])+ ')' +- print >>ROUT, 'pie(junction,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing junctions",labels=c("partial_novel %d%%","complete_novel %d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)) +- print >>ROUT, "dev.off()" ++ print('pdf("splicing_junction_pie.pdf")', file=ROUT) ++ print("junction=c(" + ','.join([str(i*100.0/total_junc) for i in (novel3or5_junc,novel35_junc,known_junc,)])+ ')', file=ROUT) ++ print('pie(junction,col=c(2,3,4),init.angle=30,angle=c(60,120,150),density=c(70,70,70),main="splicing 
junctions",labels=c("partial_novel %d%%","complete_novel %d%%","known %d%%"))' % (round(novel3or5_junc*100.0/total_junc),round(novel35_junc*100.0/total_junc),round(known_junc*100.0/total_junc)), file=ROUT) ++ print("dev.off()", file=ROUT) + #print >>ROUT, "mat=matrix(c(events,junction),byrow=T,ncol=3)" + #print >>ROUT, 'barplot(mat,beside=T,ylim=c(0,100),names=c("known","partial\nnovel","complete\nnovel"),legend.text=c("splicing events","splicing junction"),ylab="Percent")' + +@@ -3601,7 +3601,7 @@ class ParseBAM: + + out_file = outfile + ".junctionSaturation_plot.r" + if refgene is None: +- print >>sys.stderr, "You must provide reference gene model in bed format." ++ print("You must provide reference gene model in bed format.", file=sys.stderr) + sys.exit(1) + + OUT = open(out_file,'w') +@@ -3609,12 +3609,12 @@ class ParseBAM: + + #reading reference gene + knownSpliceSites= set() +- print >>sys.stderr, "reading reference bed file: ",refgene, " ... ", ++ print("reading reference bed file: ",refgene, " ... 
", end=' ', file=sys.stderr) + for line in open(refgene,'r'): + if line.startswith(('#','track','browser')):continue + fields = line.split() + if(len(fields)<12): +- print >>sys.stderr, "Invalid bed line (skipped):",line, ++ print("Invalid bed line (skipped):",line, end=' ', file=sys.stderr) + continue + chrom = fields[0].upper() + tx_start = int( fields[1] ) +@@ -3622,15 +3622,15 @@ class ParseBAM: + if int(fields[9] ==1): + continue + +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends); ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)); + intron_start = exon_ends[:-1] + intron_end=exon_starts[1:] + for st,end in zip (intron_start, intron_end): + knownSpliceSites.add(chrom + ":" + str(st) + "-" + str(end)) +- print >>sys.stderr,"Done! Total "+str(len(knownSpliceSites)) + " known splicing junctions." ++ print("Done! Total "+str(len(knownSpliceSites)) + " known splicing junctions.", file=sys.stderr) + + + #read SAM file +@@ -3639,12 +3639,12 @@ class ParseBAM: + intron_end=[] + uniqSpliceSites=collections.defaultdict(int) + +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + try: + while(1): + flag=0 +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_qcfail:continue #skip low quanlity + if aligned_read.is_duplicate:continue #skip duplicate read + if aligned_read.is_secondary:continue #skip non primary hit +@@ -3667,11 +3667,11 @@ class ParseBAM: + if intrn[2] - intrn[1] < min_intron:continue + samSpliceSites.append(intrn[0] + ":" + str(intrn[1]) + "-" + str(intrn[2])) + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + +- print >>sys.stderr, "shuffling alignments ...", ++ print("shuffling alignments ...", end=' ', file=sys.stderr) + random.shuffle(samSpliceSites) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + #resampling + SR_num = len(samSpliceSites) +@@ -3681,7 +3681,7 @@ class ParseBAM: + all_junc=[] + unknown_junc=[] + #=========================sampling uniquely mapped reads from population +- tmp=range(sample_start,sample_end,sample_step) ++ tmp=list(range(sample_start,sample_end,sample_step)) + tmp.append(100) + for pertl in tmp: #[5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95,100] + knownSpliceSites_num = 0 +@@ -3690,21 +3690,21 @@ class ParseBAM: + if index_st < 0: index_st = 0 + sample_size += index_end -index_st + +- print >>sys.stderr, "sampling " + str(pertl) +"% (" + str(sample_size) + ") splicing reads.", ++ print("sampling " + str(pertl) +"% (" + str(sample_size) + ") splicing reads.", end=' ', file=sys.stderr) + + #all splice juntion + for i in range(index_st, index_end): + uniqSpliceSites[samSpliceSites[i]] +=1 +- all_junctionNum = len(uniqSpliceSites.keys()) ++ all_junctionNum = len(list(uniqSpliceSites.keys())) + all_junc.append(str(all_junctionNum)) +- print >>sys.stderr, str(all_junctionNum) + " splicing junctions.", ++ print(str(all_junctionNum) + " splicing junctions.", end=' ', file=sys.stderr) + + #known splice junction + known_junctionNum = 0 + for 
sj in uniqSpliceSites: + if sj in knownSpliceSites and uniqSpliceSites[sj] >= recur: + known_junctionNum +=1 +- print >>sys.stderr, str(known_junctionNum) + " known splicing junctions.", ++ print(str(known_junctionNum) + " known splicing junctions.", end=' ', file=sys.stderr) + known_junc.append(str(known_junctionNum)) + + #unknown splice junction +@@ -3713,28 +3713,28 @@ class ParseBAM: + if sj not in knownSpliceSites: + unknown_junctionNum +=1 + unknown_junc.append(str(unknown_junctionNum)) +- print >>sys.stderr, str(unknown_junctionNum) + " novel splicing junctions." ++ print(str(unknown_junctionNum) + " novel splicing junctions.", file=sys.stderr) + + #for j in uniq_SJ: + #print >>OUT, j + "\t" + str(uniq_SJ[j]) +- print >>OUT, "pdf(\'%s\')" % (outfile + '.junctionSaturation_plot.pdf') +- print >>OUT, "x=c(" + ','.join([str(i) for i in tmp]) + ')' +- print >>OUT, "y=c(" + ','.join(known_junc) + ')' +- print >>OUT, "z=c(" + ','.join(all_junc) + ')' +- print >>OUT, "w=c(" + ','.join(unknown_junc) + ')' +- print >>OUT, "m=max(%d,%d,%d)" % (int(int(known_junc[-1])/1000), int(int(all_junc[-1])/1000),int(int(unknown_junc[-1])/1000)) +- print >>OUT, "n=min(%d,%d,%d)" % (int(int(known_junc[0])/1000), int(int(all_junc[0])/1000),int(int(unknown_junc[0])/1000)) +- print >>OUT, "plot(x,z/1000,xlab='percent of total reads',ylab='Number of splicing junctions (x1000)',type='o',col='blue',ylim=c(n,m))" +- print >>OUT, "points(x,y/1000,type='o',col='red')" +- print >>OUT, "points(x,w/1000,type='o',col='green')" +- print >>OUT, 'legend(5,%d, legend=c("All junctions","known junctions", "novel junctions"),col=c("blue","red","green"),lwd=1,pch=1)' % int(int(all_junc[-1])/1000) +- print >>OUT, "dev.off()" ++ print("pdf(\'%s\')" % (outfile + '.junctionSaturation_plot.pdf'), file=OUT) ++ print("x=c(" + ','.join([str(i) for i in tmp]) + ')', file=OUT) ++ print("y=c(" + ','.join(known_junc) + ')', file=OUT) ++ print("z=c(" + ','.join(all_junc) + ')', file=OUT) ++ print("w=c(" + 
','.join(unknown_junc) + ')', file=OUT) ++ print("m=max(%d,%d,%d)" % (int(int(known_junc[-1])/1000), int(int(all_junc[-1])/1000),int(int(unknown_junc[-1])/1000)), file=OUT) ++ print("n=min(%d,%d,%d)" % (int(int(known_junc[0])/1000), int(int(all_junc[0])/1000),int(int(unknown_junc[0])/1000)), file=OUT) ++ print("plot(x,z/1000,xlab='percent of total reads',ylab='Number of splicing junctions (x1000)',type='o',col='blue',ylim=c(n,m))", file=OUT) ++ print("points(x,y/1000,type='o',col='red')", file=OUT) ++ print("points(x,w/1000,type='o',col='green')", file=OUT) ++ print('legend(5,%d, legend=c("All junctions","known junctions", "novel junctions"),col=c("blue","red","green"),lwd=1,pch=1)' % int(int(all_junc[-1])/1000), file=OUT) ++ print("dev.off()", file=OUT) + + def saturation_RPKM(self,refbed,outfile,sample_start=5,sample_step=5,sample_end=100,skip_multi=True, strand_rule=None): + '''for each gene, check if its RPKM (epxresion level) has already been saturated or not''' + + if refbed is None: +- print >>sys.stderr,"You must specify a bed file representing gene model\n" ++ print("You must specify a bed file representing gene model\n", file=sys.stderr) + exit(0) + rpkm_file = outfile + ".eRPKM.xls" + raw_file = outfile + ".rawCount.xls" +@@ -3759,17 +3759,17 @@ class ParseBAM: + elif len(strand_rule.split(',')) ==2: #singeEnd, strand-specific + for i in strand_rule.split(','):strandRule[i[0]]=i[1] + else: +- print >>sys.stderr, "Unknown value of: 'strand_rule' " + strand_rule ++ print("Unknown value of: 'strand_rule' " + strand_rule, file=sys.stderr) + sys.exit(1) + + + #read SAM or BAM +- if self.bam_format:print >>sys.stderr, "Load BAM file ... ", +- else:print >>sys.stderr, "Load SAM file ... ", ++ if self.bam_format:print("Load BAM file ... ", end=' ', file=sys.stderr) ++ else:print("Load SAM file ... 
", end=' ', file=sys.stderr) + try: + while(1): + flag=0 +- aligned_read = self.samfile.next() ++ aligned_read = next(self.samfile) + if aligned_read.is_qcfail:continue #skip low quanlity + if aligned_read.is_duplicate:continue #skip duplicate read + if aligned_read.is_secondary:continue #skip non primary hit +@@ -3812,14 +3812,14 @@ class ParseBAM: + for exn in exon_blocks: + block_list.append(exn[0] + ":" + str(exn[1] + (exn[2]-exn[1])/2 )) + except StopIteration: +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + +- print >>sys.stderr, "shuffling alignments ...", ++ print("shuffling alignments ...", end=' ', file=sys.stderr) + random.shuffle(block_list_plus) + random.shuffle(block_list_minus) + random.shuffle(block_list) +- print >>sys.stderr, "Done" ++ print("Done", file=sys.stderr) + + + ranges_plus={} +@@ -3830,7 +3830,7 @@ class ParseBAM: + rawCount_table=collections.defaultdict(list) + RPKM_head=['#chr','start','end','name','score','strand'] + +- tmp=range(sample_start,sample_end,sample_step) ++ tmp=list(range(sample_start,sample_end,sample_step)) + tmp.append(100) + #=========================sampling uniquely mapped reads from population + for pertl in tmp: #[5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95,100] +@@ -3841,27 +3841,27 @@ class ParseBAM: + RPKM_head.append(str(pertl) + '%') + + if strand_rule is not None: +- print >>sys.stderr, "sampling " + str(pertl) +"% (" + str(int(cUR_plus * percent_end)) + ") forward strand fragments ..." 
++ print("sampling " + str(pertl) +"% (" + str(int(cUR_plus * percent_end)) + ") forward strand fragments ...", file=sys.stderr) + for i in block_list_plus[int(cUR_plus*percent_st):int(cUR_plus*percent_end)]: + (chr,coord) = i.split(':') + if chr not in ranges_plus:ranges_plus[chr] = Intersecter() + else:ranges_plus[chr].add_interval( Interval( int(coord), int(coord)+1 ) ) + +- print >>sys.stderr, "sampling " + str(pertl) +"% (" + str(int(cUR_minus * percent_end)) + ") reverse strand fragments ..." ++ print("sampling " + str(pertl) +"% (" + str(int(cUR_minus * percent_end)) + ") reverse strand fragments ...", file=sys.stderr) + for i in block_list_minus[int(cUR_minus*percent_st):int(cUR_minus*percent_end)]: + (chr,coord) = i.split(':') + if chr not in ranges_minus:ranges_minus[chr] = Intersecter() + else:ranges_minus[chr].add_interval( Interval( int(coord), int(coord)+1 ) ) + + else: +- print >>sys.stderr, "sampling " + str(pertl) +"% (" + str(int(sample_size)) + ") fragments ..." ++ print("sampling " + str(pertl) +"% (" + str(int(sample_size)) + ") fragments ...", file=sys.stderr) + for i in block_list[int(cUR_num*percent_st):int(cUR_num*percent_end)]: + (chr,coord) = i.split(':') + if chr not in ranges:ranges[chr] = Intersecter() + else:ranges[chr].add_interval( Interval( int(coord), int(coord)+1 ) ) + + #========================= calculating RPKM based on sub-population +- print >>sys.stderr, "assign reads to transcripts in " + refbed + ' ...' 
++ print("assign reads to transcripts in " + refbed + ' ...', file=sys.stderr) + for line in open(refbed,'r'): + try: + if line.startswith(('#','track','browser')):continue +@@ -3872,14 +3872,14 @@ class ParseBAM: + tx_end = int( fields[2] ) + geneName = fields[3] + strand = fields[5] +- exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) ) +- exon_starts = map((lambda x: x + tx_start ), exon_starts) +- exon_ends = map( int, fields[10].rstrip( ',\n' ).split( ',' ) ) +- exon_ends = map((lambda x, y: x + y ), exon_starts, exon_ends) +- exon_sizes = map(int,fields[10].rstrip(',\n').split(',')) ++ exon_starts = list(map( int, fields[11].rstrip( ',\n' ).split( ',' ) )) ++ exon_starts = list(map((lambda x: x + tx_start ), exon_starts)) ++ exon_ends = list(map( int, fields[10].rstrip( ',\n' ).split( ',' ) )) ++ exon_ends = list(map((lambda x, y: x + y ), exon_starts, exon_ends)) ++ exon_sizes = list(map(int,fields[10].rstrip(',\n').split(','))) + key='\t'.join((chrom.lower(),str(tx_start),str(tx_end),geneName,'0',strand)) + except: +- print >>sys.stderr,"[NOTE:input bed must be 12-column] skipped this line: " + line ++ print("[NOTE:input bed must be 12-column] skipped this line: " + line, file=sys.stderr) + continue + mRNA_count=0 #we need to initializ it to 0 for each gene + mRNA_len=sum(exon_sizes) +@@ -3892,24 +3892,24 @@ class ParseBAM: + if chrom in ranges: + mRNA_count += len(ranges[chrom].find(st,end)) + if mRNA_len ==0: +- print >>sys.stderr, geneName + " has 0 nucleotides. Exit!" ++ print(geneName + " has 0 nucleotides. Exit!", file=sys.stderr) + sys.exit(1) + if sample_size == 0: +- print >>sys.stderr, "Too few reads to sample. Exit!" ++ print("Too few reads to sample. 
Exit!", file=sys.stderr) + sys.exit(1) + mRNA_RPKM = (mRNA_count * 1000000000.0)/(mRNA_len * sample_size) + RPKM_table[key].append(str(mRNA_RPKM)) + rawCount_table[key].append(str(mRNA_count)) +- print >>sys.stderr, "" ++ print("", file=sys.stderr) + + #self.f.seek(0) +- print >>RPKM_OUT, '\t'.join(RPKM_head) +- print >>RAW_OUT, '\t'.join(RPKM_head) ++ print('\t'.join(RPKM_head), file=RPKM_OUT) ++ print('\t'.join(RPKM_head), file=RAW_OUT) + for key in RPKM_table: +- print >>RPKM_OUT, key + '\t', +- print >>RPKM_OUT, '\t'.join(RPKM_table[key]) +- print >>RAW_OUT, key + '\t', +- print >>RAW_OUT, '\t'.join(rawCount_table[key]) ++ print(key + '\t', end=' ', file=RPKM_OUT) ++ print('\t'.join(RPKM_table[key]), file=RPKM_OUT) ++ print(key + '\t', end=' ', file=RAW_OUT) ++ print('\t'.join(rawCount_table[key]), file=RAW_OUT) + + def fetchAlignments(self,chr,st,end): + '''fetch alignment from sorted BAM file based on chr, st, end +@@ -3927,4 +3927,4 @@ def print_bits_as_bed( bits ): + start = bits.next_set( end ) + if start == bits.size: break + end = bits.next_clear( start ) +- print "%d\t%d" % ( start, end ) ++ print("%d\t%d" % ( start, end )) diff --git a/biology/py-ont-fast5-api/Makefile b/biology/py-ont-fast5-api/Makefile index d998d68a0e0..79879d97e61 100644 --- a/biology/py-ont-fast5-api/Makefile +++ b/biology/py-ont-fast5-api/Makefile @@ -1,6 +1,6 @@ PORTNAME= ont-fast5-api DISTVERSIONPREFIX= release_ -DISTVERSION= 4.0.0 +DISTVERSION= 4.0.2 CATEGORIES= biology python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -18,7 +18,7 @@ RUN_DEPENDS= ${PYNUMPY} \ TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pip>=0:devel/py-pip@${PY_FLAVOR} USES= python -USE_PYTHON= autoplist distutils +USE_PYTHON= autoplist concurrent distutils USE_GITHUB= yes GH_ACCOUNT= nanoporetech diff --git a/biology/py-ont-fast5-api/distinfo b/biology/py-ont-fast5-api/distinfo index 9156cd8912e..d6298ee5ba5 100644 --- a/biology/py-ont-fast5-api/distinfo +++ b/biology/py-ont-fast5-api/distinfo @@ -1,3 +1,3 @@ 
-TIMESTAMP = 1628770827 -SHA256 (nanoporetech-ont_fast5_api-release_4.0.0_GH0.tar.gz) = 7f9b3fa0f814ff38c30e9cb21971ec61e2b3fd52fb166a8404c21bf25679e0ab -SIZE (nanoporetech-ont_fast5_api-release_4.0.0_GH0.tar.gz) = 4081125 +TIMESTAMP = 1648128656 +SHA256 (nanoporetech-ont_fast5_api-release_4.0.2_GH0.tar.gz) = c078dc0d20b02cd9807105415f259e099f1adc8b922d67a1a9fe04321c098361 +SIZE (nanoporetech-ont_fast5_api-release_4.0.2_GH0.tar.gz) = 4082466 diff --git a/biology/py-ont-fast5-api/files/patch-setup.py b/biology/py-ont-fast5-api/files/patch-setup.py new file mode 100644 index 00000000000..a29c7326d3b --- /dev/null +++ b/biology/py-ont-fast5-api/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2022-03-25 16:23:32 UTC ++++ setup.py +@@ -35,7 +35,7 @@ setup(name=__pkg_name__.replace("_", "-"), + version=get_version(), + url='https://github.com/nanoporetech/{}'.format(__pkg_name__), + install_requires=installation_requirements, +- packages=find_packages(), ++ packages=find_packages(exclude=['test']), + package_data={__pkg_name__: ['vbz_plugin/*.so', 'vbz_plugin/*.dylib', 'vbz_plugin/*.dll']}, + python_requires='>=3.6', + entry_points={'console_scripts': [ diff --git a/biology/python-nexus/files/patch-2to3 b/biology/python-nexus/files/patch-2to3 new file mode 100644 index 00000000000..07cc1ef4371 --- /dev/null +++ b/biology/python-nexus/files/patch-2to3 @@ -0,0 +1,19 @@ +--- nexus/bin/nexuscheck.py.orig 2018-04-10 09:11:00 UTC ++++ nexus/bin/nexuscheck.py +@@ -1,6 +1,6 @@ + #!/usr/bin/env python + import warnings +-from __future__ import print_function ++ + from nexus import NexusReader, VERSION + from nexus.checker import checkers + +@@ -44,7 +44,7 @@ if __name__ == '__main__': + if len(warned): + print("Warnings encountered in reading nexus:") + for w in warned: +- print("\t%s" % w) ++ print(("\t%s" % w)) + + for checker in checkers: + checker(nex, verbose=args.verbose).status() diff --git a/biology/sra-tools/Makefile b/biology/sra-tools/Makefile index 
91b15622dd0..ffed2a35cf2 100644 --- a/biology/sra-tools/Makefile +++ b/biology/sra-tools/Makefile @@ -1,5 +1,6 @@ PORTNAME= sra-tools DISTVERSION= 2.11.0 +PORTREVISION= 1 CATEGORIES= biology MAINTAINER= jwb@FreeBSD.org diff --git a/cad/horizon-eda/Makefile b/cad/horizon-eda/Makefile index f74a3442b8c..4a0b7f736f5 100644 --- a/cad/horizon-eda/Makefile +++ b/cad/horizon-eda/Makefile @@ -1,6 +1,7 @@ PORTNAME= horizon-eda DISTVERSIONPREFIX= v DISTVERSION= 2.2.0 +PORTREVISION= 1 CATEGORIES= cad MAINTAINER= yuri@FreeBSD.org diff --git a/cad/ifcopenshell/Makefile b/cad/ifcopenshell/Makefile index e1eb435371c..a6e523f7a33 100644 --- a/cad/ifcopenshell/Makefile +++ b/cad/ifcopenshell/Makefile @@ -1,7 +1,7 @@ PORTNAME= ifcopenshell DISTVERSIONPREFIX= v DISTVERSION= 0.6.0 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= cad MAINTAINER= lbartoletti@FreeBSD.org diff --git a/cad/opencascade/Makefile b/cad/opencascade/Makefile index 4831b918fd1..692a8bb18a8 100644 --- a/cad/opencascade/Makefile +++ b/cad/opencascade/Makefile @@ -2,7 +2,7 @@ PORTNAME= opencascade PORTVERSION= 7.6.0 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= cad science MASTER_SITES= LOCAL/thierry diff --git a/cad/openscad-devel/Makefile b/cad/openscad-devel/Makefile index 03e881196a2..af3f2ed9f63 100644 --- a/cad/openscad-devel/Makefile +++ b/cad/openscad-devel/Makefile @@ -2,7 +2,7 @@ PORTNAME= openscad PORTVERSION= 2021.02.09 -PORTREVISION= 8 +PORTREVISION= 10 CATEGORIES= cad PKGNAMESUFFIX= -devel diff --git a/cad/openscad/Makefile b/cad/openscad/Makefile index 3ad7848e7b9..ba84a992bd7 100644 --- a/cad/openscad/Makefile +++ b/cad/openscad/Makefile @@ -2,7 +2,7 @@ PORTNAME= openscad PORTVERSION= 2021.01.01 -PORTREVISION= 8 +PORTREVISION= 10 CATEGORIES= cad PATCH_SITES= https://github.com/${GH_ACCOUNT}/${GH_PROJECT}/commit/ diff --git a/cad/openvsp/Makefile b/cad/openvsp/Makefile index 1c2c29b218c..96bc31fbfa5 100644 --- a/cad/openvsp/Makefile +++ b/cad/openvsp/Makefile @@ -1,6 +1,7 @@ PORTNAME= openvsp 
DISTVERSIONPREFIX= ${GH_PROJECT}_ DISTVERSION= 3.27.1 +PORTREVISION= 1 CATEGORIES= cad MAINTAINER= fernape@FreeBSD.org diff --git a/cad/oregano/Makefile b/cad/oregano/Makefile index 905920e0561..7c81819f41d 100644 --- a/cad/oregano/Makefile +++ b/cad/oregano/Makefile @@ -3,7 +3,7 @@ PORTNAME= oregano DISTVERSIONPREFIX= v DISTVERSION= 0.84.43 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= cad MAINTAINER= yuri@FreeBSD.org diff --git a/cad/repsnapper/Makefile b/cad/repsnapper/Makefile index 0b22cda871b..e56c9f38cb2 100644 --- a/cad/repsnapper/Makefile +++ b/cad/repsnapper/Makefile @@ -2,7 +2,7 @@ PORTNAME= repsnapper DISTVERSION= 2.5a4 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= cad MAINTAINER= martin.dieringer@gmx.de diff --git a/cad/solvespace/Makefile b/cad/solvespace/Makefile index 7b4b9c3f58b..743e3817d62 100644 --- a/cad/solvespace/Makefile +++ b/cad/solvespace/Makefile @@ -1,5 +1,6 @@ PORTNAME= solvespace PORTVERSION= 3.0 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= cad diff --git a/cad/veroroute/Makefile b/cad/veroroute/Makefile index a50dcd70b0e..08fad62f2a1 100644 --- a/cad/veroroute/Makefile +++ b/cad/veroroute/Makefile @@ -1,5 +1,5 @@ PORTNAME= veroroute -DISTVERSION= 2.19 +DISTVERSION= 2.20 PORTEPOCH= 1 CATEGORIES= cad MASTER_SITES= SF/${PORTNAME}/ diff --git a/cad/veroroute/distinfo b/cad/veroroute/distinfo index 1846e725295..d2e950e4b5a 100644 --- a/cad/veroroute/distinfo +++ b/cad/veroroute/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645743298 -SHA256 (veroroute-2.19.tar.gz) = 1f8ad7c18cad4066eafeffe19f045b5aefa871bdf20abdb58da90f128b6880e0 -SIZE (veroroute-2.19.tar.gz) = 958167 +TIMESTAMP = 1648346755 +SHA256 (veroroute-2.20.tar.gz) = 530f4e4e86889ca26f2a5127466d0c55669ae71b327873cf18a8a9f451e38a8e +SIZE (veroroute-2.20.tar.gz) = 1018969 diff --git a/cad/veroroute/pkg-plist b/cad/veroroute/pkg-plist index deeb44b3def..9280c3f8f84 100644 --- a/cad/veroroute/pkg-plist +++ b/cad/veroroute/pkg-plist @@ -181,6 +181,9 @@ share/pixmaps/veroroute.png 
%%DATADIR%%/tutorials/tutorial_20.vrt %%DATADIR%%/tutorials/tutorial_21.vrt %%DATADIR%%/tutorials/tutorial_22.vrt +%%DATADIR%%/tutorials/tutorial_23.vrt +%%DATADIR%%/tutorials/tutorial_24.vrt +%%DATADIR%%/tutorials/tutorial_25.vrt %%DATADIR%%/tutorials/tutorial_3.vrt %%DATADIR%%/tutorials/tutorial_4.vrt %%DATADIR%%/tutorials/tutorial_5.vrt diff --git a/chinese/fcitx/Makefile b/chinese/fcitx/Makefile index 52292fd8324..dfd32c49a13 100644 --- a/chinese/fcitx/Makefile +++ b/chinese/fcitx/Makefile @@ -2,6 +2,7 @@ PORTNAME= fcitx PORTVERSION= 4.2.9.7 +PORTREVISION= 1 CATEGORIES= chinese x11 MASTER_SITES= http://download.fcitx-im.org/fcitx/:fcitx \ http://download.fcitx-im.org/data/:data diff --git a/comms/gnocky/Makefile b/comms/gnocky/Makefile index b621e2cc0bb..eedcef54eed 100644 --- a/comms/gnocky/Makefile +++ b/comms/gnocky/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnocky PORTVERSION= 0.0.7 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= comms MASTER_SITES= http://www.gnokii.org/download/gnocky/ diff --git a/comms/gstreamer1-plugins-spandsp/Makefile b/comms/gstreamer1-plugins-spandsp/Makefile index 28d250a2474..3473021a3cf 100644 --- a/comms/gstreamer1-plugins-spandsp/Makefile +++ b/comms/gstreamer1-plugins-spandsp/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= comms COMMENT= GStreamer DSP library and software FAX machine plugin diff --git a/comms/hamlib/Makefile b/comms/hamlib/Makefile index a53a00bacfe..ed3a44646a9 100644 --- a/comms/hamlib/Makefile +++ b/comms/hamlib/Makefile @@ -2,6 +2,7 @@ PORTNAME= hamlib PORTVERSION= 4.4 +PORTREVISION= 1 CATEGORIES= comms hamradio MASTER_SITES= SF diff --git a/comms/py-esptool/Makefile b/comms/py-esptool/Makefile index bc696366cd6..997c100eccb 100644 --- a/comms/py-esptool/Makefile +++ b/comms/py-esptool/Makefile @@ -1,6 +1,6 @@ PORTNAME= esptool DISTVERSIONPREFIX= v -DISTVERSION= 3.2 +DISTVERSION= 3.3 CATEGORIES= comms python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -11,7 +11,7 @@ LICENSE= GPLv2 LICENSE_FILE= 
${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pyserial>=3.0:comms/py-pyserial@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}ecdsa>0:security/py-ecdsa@${PY_FLAVOR}\ + ${PYTHON_PKGNAMEPREFIX}ecdsa>=0.16.0:security/py-ecdsa@${PY_FLAVOR}\ ${PYTHON_PKGNAMEPREFIX}bitstring>=3.1.6:devel/py-bitstring@${PY_FLAVOR}\ ${PYTHON_PKGNAMEPREFIX}cryptography>=2.1.4:security/py-cryptography@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}reedsolo>=1.5.3,<=1.5.4:devel/py-reedsolo@${PY_FLAVOR} @@ -41,7 +41,7 @@ do-test: test/test_modules.py; \ do ${SETENV} ${TEST_ENV} ${PYTHON_CMD} $${test}; done @cd ${TEST_WRKSRC} && for chip in \ - esp32 esp32s2 esp32s3beta2 esp32s3 esp32c3 esp32h2; \ + esp32 esp32c2 esp32c3 esp32s2 esp32s3 esp32s3beta2 esp32h2beta1; \ do ${SETENV} ${TEST_ENV} ${PYTHON_CMD} \ test/test_espefuse_host.py $${chip}; done .if exists(${ESPTOOL_SERIALPORT}) diff --git a/comms/py-esptool/distinfo b/comms/py-esptool/distinfo index 8c0411a1463..d753ddf9ccb 100644 --- a/comms/py-esptool/distinfo +++ b/comms/py-esptool/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637306726 -SHA256 (espressif-esptool-v3.2_GH0.tar.gz) = f833a78890a833315a6dbaedffb2086e975c493dc0df597c97d7e14c7eadba88 -SIZE (espressif-esptool-v3.2_GH0.tar.gz) = 7155428 +TIMESTAMP = 1648106321 +SHA256 (espressif-esptool-v3.3_GH0.tar.gz) = 963373977ef3293b6cb504f2f2dbc1f539e361fdd77ab2b9da1c13625e8f5ec1 +SIZE (espressif-esptool-v3.3_GH0.tar.gz) = 7259717 diff --git a/comms/xastir/Makefile b/comms/xastir/Makefile index 7535b2aee4a..2550790fc35 100644 --- a/comms/xastir/Makefile +++ b/comms/xastir/Makefile @@ -3,6 +3,7 @@ PORTNAME= xastir DISTVERSIONPREFIX= Release- DISTVERSION= 2.1.8 +PORTREVISION= 1 CATEGORIES= comms hamradio MAINTAINER= carl@stagecraft.cx diff --git a/converters/p5-Text-Iconv/Makefile b/converters/p5-Text-Iconv/Makefile index 834ed6f9c89..719c41682be 100644 --- a/converters/p5-Text-Iconv/Makefile +++ b/converters/p5-Text-Iconv/Makefile @@ -2,7 +2,7 @@ PORTNAME= Text-Iconv PORTVERSION= 1.7 -PORTREVISION= 3 
+PORTREVISION= 4 CATEGORIES= converters perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- @@ -18,4 +18,7 @@ USE_PERL5= configure CONFIGURE_ARGS+=INC="-I${ICONV_PREFIX}/include ${CFLAGS}" \ LIBS="-L${ICONV_PREFIX}/lib ${ICONV_LIB}" +post-install: + ${STRIP_CMD} ${STAGEDIR}${PREFIX}/${SITE_ARCH_REL}/auto/Text/Iconv/Iconv.so + .include diff --git a/converters/p5-Unicode-String/Makefile b/converters/p5-Unicode-String/Makefile index a57e3d3ab9a..490bd4b2b75 100644 --- a/converters/p5-Unicode-String/Makefile +++ b/converters/p5-Unicode-String/Makefile @@ -2,6 +2,7 @@ PORTNAME= Unicode-String PORTVERSION= 2.10 +PORTREVISION= 1 CATEGORIES= converters perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- @@ -15,4 +16,7 @@ LICENSE_COMB= dual USES= perl5 USE_PERL5= configure +post-install: + ${STRIP_CMD} ${STAGEDIR}${PREFIX}/${SITE_ARCH_REL}/auto/Unicode/String/String.so + .include diff --git a/converters/p5-Unicode-UTF8simple/Makefile b/converters/p5-Unicode-UTF8simple/Makefile index ee4f43273c3..c6cb81311d6 100644 --- a/converters/p5-Unicode-UTF8simple/Makefile +++ b/converters/p5-Unicode-UTF8simple/Makefile @@ -13,4 +13,6 @@ COMMENT= Conversions to/from UTF-8 from/to charactersets USES= perl5 USE_PERL5= configure +NO_ARCH= yes + .include diff --git a/converters/wkhtmltopdf/files/patch-configure b/converters/wkhtmltopdf/files/patch-configure index e891e62e62a..167565dc496 100644 --- a/converters/wkhtmltopdf/files/patch-configure +++ b/converters/wkhtmltopdf/files/patch-configure @@ -5,7 +5,7 @@ # Check gcc's version case "$(${QMAKE_CONF_COMPILER} -dumpversion)" in - 8*|7*|6*|5*|4*|3.4*) -+ 13*|12*|11*|10*|9*|8*|7*|6*|5*|4*|3.4*) ++ [1-9][0-9]*|[4-9]*|3.4*) ;; 3.3*) canBuildWebKit="no" diff --git a/converters/xml2c/Makefile b/converters/xml2c/Makefile index 6d6bf918245..9692f3e01ea 100644 --- a/converters/xml2c/Makefile +++ b/converters/xml2c/Makefile @@ -1,5 +1,6 @@ PORTNAME= xml2c PORTVERSION= 20151019 +PORTREVISION= 1 CATEGORIES= converters MASTER_SITES= https://acme.com/software/xml2c/ 
DISTNAME= ${PORTNAME}_19Oct2015 diff --git a/databases/akonadi/Makefile b/databases/akonadi/Makefile index 5a1cf9e3056..c4ded3ec6de 100644 --- a/databases/akonadi/Makefile +++ b/databases/akonadi/Makefile @@ -1,5 +1,6 @@ PORTNAME= akonadi DISTVERSION= ${KDE_APPLICATIONS_VERSION} +PORTREVISION= 1 CATEGORIES= databases kde kde-applications MAINTAINER= kde@FreeBSD.org diff --git a/databases/buzhug/Makefile b/databases/buzhug/Makefile index 972690f5f46..686a47251ac 100644 --- a/databases/buzhug/Makefile +++ b/databases/buzhug/Makefile @@ -9,7 +9,7 @@ MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTNAME}-${PORTVERSION} MAINTAINER= ports@FreeBSD.org COMMENT= Pure-Python database engine -USES= python:3.6+ zip +USES= dos2unix python:3.6+ zip USE_PYTHON= distutils autoplist concurrent optsuffix .include diff --git a/databases/buzhug/files/patch-2to3 b/databases/buzhug/files/patch-2to3 new file mode 100644 index 00000000000..8fe1a450790 --- /dev/null +++ b/databases/buzhug/files/patch-2to3 @@ -0,0 +1,1032 @@ +--- buzhug/buzhug_algos.py.orig 2022-03-15 18:59:32 UTC ++++ buzhug/buzhug_algos.py +@@ -28,11 +28,11 @@ def make_search_func(db,field,value): + if isinstance(value,(list,tuple)): + value = list(value) + if not len(value)==2: +- raise ValueError,"If argument is a list, only 2 values \ +- should be passed (found %s)" %len(value) ++ raise ValueError("If argument is a list, only 2 values \ ++ should be passed (found %s)" %len(value)) + if not db.fields[field] in [int,float,date,datetime]: +- raise TypeError,"Search between values is only allowed for " \ +- "int, float, date and datetime (found %s)" %db.fields[field] ++ raise TypeError("Search between values is only allowed for " \ ++ "int, float, date and datetime (found %s)" %db.fields[field]) + db._validate(field,value[0]) + db._validate(field,value[1]) + value.sort() +@@ -110,15 +110,15 @@ def fast_select(db,names,**args): + used to select a subset of record rows in field files + """ + # fixed and variable length fields 
+- f_args = [ (k,v) for k,v in args.iteritems() ++ f_args = [ (k,v) for k,v in args.items() + if hasattr(db._file[k],'block_len') ] +- v_args = [ (k,v) for (k,v) in args.iteritems() ++ v_args = [ (k,v) for (k,v) in args.items() + if not hasattr(db._file[k],'block_len') ] + arg_names = [ k for k,v in f_args + v_args ] + no_args = [ n for n in names if not n in arg_names ] + names = arg_names + no_args + +- [ db._file[k].seek(0) for k in names + args.keys() ] ++ [ db._file[k].seek(0) for k in names + list(args.keys()) ] + max_len = max([ db._file[k[0]].block_len for k in f_args ]) + num_blocks = db.BLOCKSIZE / max_len + funcs = dict([(k,make_search_func(db,k,v)) +@@ -148,7 +148,7 @@ def fast_select(db,names,**args): + res[bl_offset+c] = [ ranks[k][c] for k,v in f_args ] + bl_offset += num_blocks + +- fl_ranks = res.keys() ++ fl_ranks = list(res.keys()) + fl_ranks.sort() + + # The field files for the other arguments are browsed ; if their +@@ -162,7 +162,7 @@ def fast_select(db,names,**args): + for f in other_files: + f.seek(0) + +- for i,lines in enumerate(itertools.izip(*other_files)): ++ for i,lines in enumerate(zip(*other_files)): + try: + if i == fl_ranks[0]: + fl_ranks.pop(0) +--- buzhug/buzhug_files.py.orig 2022-03-15 18:59:32 UTC ++++ buzhug/buzhug_files.py +@@ -149,8 +149,8 @@ class StringFile(VariableLengthFile): + if value is None: + return '!\n' + elif not isinstance(value,str): +- raise ValueError,'Bad type : expected str, got %s %s' %(value, +- value.__class__) ++ raise ValueError('Bad type : expected str, got %s %s' %(value, ++ value.__class__)) + else: + # escape CR & LF so that the block is on one line + value = value.replace('\\','\\\\') +@@ -192,9 +192,9 @@ class UnicodeFile(StringFile): + def to_block(self,value): + if value is None: + return '!\n' +- elif not isinstance(value,unicode): +- raise ValueError,'Bad type : expected unicode, got %s %s' %(value, +- value.__class__) ++ elif not isinstance(value,str): ++ raise ValueError('Bad type : 
expected unicode, got %s %s' %(value, ++ value.__class__)) + else: + return StringFile.to_block(self,value.encode('utf-8')) + +@@ -220,8 +220,8 @@ class DateFile(VariableLengthFile): + if value is None: + return '!xxxxxxxx\n' + elif not isinstance(value,date): +- raise ValueError,'Bad type : expected datetime.date, got %s %s' \ +- %(value,value.__class__) ++ raise ValueError('Bad type : expected datetime.date, got %s %s' \ ++ %(value,value.__class__)) + else: + if value.year>=1900: + return value.strftime('-%Y%m%d')+'\n' +@@ -243,8 +243,8 @@ class DateTimeFile(VariableLengthFile): + if value is None: + return '!xxxxxxxxxxxxxx\n' + elif not isinstance(value,date): +- raise ValueError,'Bad type : expected datetime.date, got %s %s' \ +- %(value,value.__class__) ++ raise ValueError('Bad type : expected datetime.date, got %s %s' \ ++ %(value,value.__class__)) + else: + if value.year>=1900: + return value.strftime('-%Y%m%d%H%M%S')+'\n' +@@ -272,8 +272,8 @@ class TimeFile(VariableLengthFile): + if value is None: + return '!xxxxxx\n' + elif not isinstance(value, dtime): +- raise ValueError,'Bad type : expected datetime.time, got %s %s' \ +- %(value,value.__class__) ++ raise ValueError('Bad type : expected datetime.time, got %s %s' \ ++ %(value,value.__class__)) + else: + return value.strftime('-%H%M%S')+'\n' + +@@ -291,8 +291,8 @@ class BooleanFile(FixedLengthFile): + if value is None: + return '!'+chr(0) + elif not isinstance(value,bool): +- raise ValueError,'Bad type : expected bool, got %s %s' \ +- %(value,value.__class__) ++ raise ValueError('Bad type : expected bool, got %s %s' \ ++ %(value,value.__class__)) + else: + if value: + return '-1' +@@ -317,15 +317,15 @@ class IntegerFile(FixedLengthFile): + if value is None: + return '!'+chr(0)*4 + elif not isinstance(value,int): +- raise ValueError,'Bad type : expected int, got %s %s' \ +- %(value,value.__class__) ++ raise ValueError('Bad type : expected int, got %s %s' \ ++ %(value,value.__class__)) + else: +- if value <= 
-sys.maxint/2: +- raise OverflowError,"Integer value must be > %s, got %s" \ +- %(-sys.maxint/2,value) +- if value > sys.maxint/2: +- raise OverflowError,"Integer value must be <= %s, got %s" \ +- %(sys.maxint/2,value) ++ if value <= -sys.maxsize/2: ++ raise OverflowError("Integer value must be > %s, got %s" \ ++ %(-sys.maxsize/2,value)) ++ if value > sys.maxsize/2: ++ raise OverflowError("Integer value must be <= %s, got %s" \ ++ %(sys.maxsize/2,value)) + return '-'+struct.pack('>i',value+self.MIDINT) + + def from_block(self,block): +@@ -370,8 +370,8 @@ class FloatFile(FixedLengthFile): + if value is None: + return '!'+chr(0)*9 + elif not isinstance(value,float): +- raise ValueError,'Bad type : expected float, got %s %s' \ +- %(value,value.__class__) ++ raise ValueError('Bad type : expected float, got %s %s' \ ++ %(value,value.__class__)) + elif value == 0.0: + return '-'+chr(128)+chr(0)*8 + else: +--- buzhug/buzhug_info.py.orig 2022-03-15 18:59:32 UTC ++++ buzhug/buzhug_info.py +@@ -2,7 +2,7 @@ + field definitions with types and default values""" + + import os +-import urllib ++import urllib.request, urllib.parse, urllib.error + + def set_info(base,fields): + base.defaults = {} +@@ -13,28 +13,28 @@ def validate_field(base,field_def): + """Validate field definition""" + name,typ = field_def[:2] + if name in ['__id__','__version__']: +- raise ValueError,'Field name "%s" is reserved' %name ++ raise ValueError('Field name "%s" is reserved' %name) + elif name.startswith('_'): +- raise ValueError,"Error for %s : names can't begin with _" \ +- % name +- if typ not in base.types.values(): ++ raise ValueError("Error for %s : names can't begin with _" \ ++ % name) ++ if typ not in list(base.types.values()): + if isinstance(typ,base.__class__): # external link + base._register_base(typ) + else: +- raise TypeError,"type %s not allowed" %typ ++ raise TypeError("type %s not allowed" %typ) + if len(field_def)>2: + # if a default value is provided, check if it is valid + default 
= field_def[2] + if isinstance(typ,base.__class__): + if not hasattr(default.__class__,"db") or \ + not default.__class__.db is typ: +- raise ValueError,'Incorrect default value for field "%s"' \ ++ raise ValueError('Incorrect default value for field "%s"' \ + " : expected %s, got %s (class %s)" %(name,typ, +- default,default.__class__) ++ default,default.__class__)) + elif not isinstance(default,typ): +- raise ValueError,'Incorrect default value for field "%s"' \ ++ raise ValueError('Incorrect default value for field "%s"' \ + " : expected %s, got %s (class %s)" %(name,typ, +- default,default.__class__) ++ default,default.__class__)) + base.defaults[name] = default + else: + base.defaults[name] = None +@@ -45,13 +45,13 @@ def save_info(base): + fields = [] + for k in base.field_names: + if isinstance(base.fields[k],base.__class__): +- fields.append((k,''+urllib.quote(base.fields[k].name))) ++ fields.append((k,''+urllib.parse.quote(base.fields[k].name))) + else: + fields.append((k,base.fields[k].__name__)) + _info.write(' '.join(['%s:%s' %(k,v) for (k,v) in fields])) + _info.close() + out = open(os.path.join(base.name,"__defaults__"),"wb") +- for field_name,default_value in base.defaults.iteritems(): ++ for field_name,default_value in base.defaults.items(): + if field_name in ["__id__","__version__"]: + continue + value = base._file[field_name].to_block(default_value) +@@ -59,7 +59,7 @@ def save_info(base): + out.close() + + def read_defaults(base): +- import buzhug_files ++ from . 
import buzhug_files + defaults = dict([(f,None) for f in base.field_names[2:]]) + if os.path.exists(os.path.join(base.name,"__defaults__")): + defs = open(os.path.join(base.name,"__defaults__"),"rb").read() +--- buzhug/buzhug_test.py.orig 2022-03-15 18:59:32 UTC ++++ buzhug/buzhug_test.py +@@ -3,8 +3,8 @@ import random + import re + + from datetime import date, datetime, time as dtime +-from buzhug import Base, TS_Base, Record +-import buzhug,buzhug_files ++from .buzhug import Base, TS_Base, Record ++from . import buzhug,buzhug_files + + + names = ['pierre','claire','simon','camille','jean', +@@ -18,7 +18,7 @@ def run_test(thread_safe=False): + else: + db = TS_Base('dummy') + +- db.create(('name',str), ('fr_name',unicode), ++ db.create(('name',str), ('fr_name',str), + ('age',int), + ('size',int,300), + ('birth',date,date(1994,1,14)), +@@ -38,10 +38,10 @@ def run_test(thread_safe=False): + try: + assert cmp(afloat,0.0) == cmp(f.to_block(afloat),f.to_block(0.0)) + except: +- print afloat +- print "afloat > 0.0 ?",afloat>0.0 +- print "blocks ?",f.to_block(afloat)>f.to_block(0.0) +- print all(f.to_block(afloat)),all(f.to_block(0.0)) ++ print(afloat) ++ print("afloat > 0.0 ?",afloat>0.0) ++ print("blocks ?",f.to_block(afloat)>f.to_block(0.0)) ++ print(all(f.to_block(afloat)),all(f.to_block(0.0))) + raise + + assert db.defaults["age"] == None +@@ -52,7 +52,7 @@ def run_test(thread_safe=False): + + for i in range(100): + db.insert(name=random.choice(names), +- fr_name = unicode(random.choice(fr_names),'latin-1'), ++ fr_name = str(random.choice(fr_names),'latin-1'), + age=random.randint(7,47),size=random.randint(110,175), + birth=date(random.randint(1858,1999),random.randint(1,12),10), + afloat = random.uniform(-10**random.randint(-307,307), +@@ -60,8 +60,8 @@ def run_test(thread_safe=False): + birth_hour = dtime(random.randint(0, 23), random.randint(0, 59), random.randint(0, 59))) + + assert len(db)==100 +- assert isinstance(db[50].fr_name,unicode) +- print 
db[50].fr_name.encode('latin-1') ++ assert isinstance(db[50].fr_name,str) ++ print(db[50].fr_name.encode('latin-1')) + + db.open() + # test if default values have not been modified after open() +@@ -74,7 +74,7 @@ def run_test(thread_safe=False): + for i in range(5): + # insert a list + db.insert(random.choice(names), +- unicode(random.choice(fr_names),'latin-1'), ++ str(random.choice(fr_names),'latin-1'), + random.randint(7,47),random.randint(110,175), + date(random.randint(1958,1999),random.randint(1,12),10), + random.uniform(-10**random.randint(-307,307), +@@ -87,11 +87,11 @@ def run_test(thread_safe=False): + try: + assert getattr(db[-1],field) == db.defaults[field] + except: +- print "attribute %s not set to default value %s" %(field,db[-1]) ++ print("attribute %s not set to default value %s" %(field,db[-1])) + raise + + # insert as string +- db.set_string_format(unicode,'latin-1') ++ db.set_string_format(str,'latin-1') + db.set_string_format(date,'%d-%m-%y') + db.set_string_format(dtime,'%H-%M-%S') + db.insert_as_strings(name="testname",fr_name=random.choice(fr_names), +@@ -112,13 +112,13 @@ def run_test(thread_safe=False): + assert db[-1].afloat == 1.0 + + # search between 2 dates +- print '\nBirth between 1960 and 1970' ++ print('\nBirth between 1960 and 1970') + for r in db.select(None,birth=[date(1960,1,1),date(1970,12,13)]): +- print r.name,r.birth ++ print(r.name,r.birth) + +- print "sorted" ++ print("sorted") + for r in db.select(None,birth=[date(1960,1,1),date(1970,12,13)]).sort_by('+name-birth'): +- print r.name,r.birth ++ print(r.name,r.birth) + + f = buzhug_files.FloatFile().to_block + def all(v): +@@ -136,28 +136,28 @@ def run_test(thread_safe=False): + try: + assert len(s1) == len(s2) == len(s3) + except: +- print "%s records by list comprehension, " %len(s1) +- print "%s by select by formula," %len(s2) +- print "%s by select by interval" %len(s3) ++ print("%s records by list comprehension, " %len(s1)) ++ print("%s by select by formula," %len(s2)) 
++ print("%s by select by interval" %len(s3)) + + for r in s1: + try: + assert r in s2 + except: +- print all(r.afloat) ++ print(all(r.afloat)) + + for r in s2: + try: + assert r in s1 + except: +- print "in select but not in list comprehension",r ++ print("in select but not in list comprehension",r) + raise + r = db[0] + assert r.__class__.db is db + + fr=random.choice(fr_names) +- s1 = [ r for r in db if r.age == 30 and r.fr_name == unicode(fr,'latin-1')] +- s2 = db.select(['name','fr_name'],age=30,fr_name = unicode(fr,'latin-1')) ++ s1 = [ r for r in db if r.age == 30 and r.fr_name == str(fr,'latin-1')] ++ s2 = db.select(['name','fr_name'],age=30,fr_name = str(fr,'latin-1')) + + assert len(s1)==len(s2) + +@@ -182,7 +182,7 @@ def run_test(thread_safe=False): + assert recs[0] == db[20] + + # check that has_key returns False for invalid hey +- assert not db.has_key(1000) ++ assert 1000 not in db + + # drop field + db.drop_field('name') +@@ -207,8 +207,8 @@ def run_test(thread_safe=False): + db.delete([db[10]]) + # check if record has been deleted + try: +- print db[10] +- raise Exception,"Row 10 should have been deleted" ++ print(db[10]) ++ raise Exception("Row 10 should have been deleted") + except IndexError: + pass + +@@ -273,19 +273,19 @@ def run_test(thread_safe=False): + + # check that record 10 is still deleted + try: +- print db[10] +- raise Exception,"Row 10 should have been deleted" ++ print(db[10]) ++ raise Exception("Row 10 should have been deleted") + except IndexError: + pass + +- print db.keys() +- print "has key 10 ?",db.has_key(10) ++ print(list(db.keys())) ++ print("has key 10 ?",10 in db) + assert 10 not in db + #raw_input() + + # check that deleted_lines was cleared by commit() + assert not db._pos.deleted_lines +- print db._del_rows.deleted_rows ++ print(db._del_rows.deleted_rows) + + length = len(db) # before cleanup + +@@ -297,8 +297,8 @@ def run_test(thread_safe=False): + + # check that record 10 is still deleted + try: +- print db[10] +- 
raise Exception,"Row 10 should have been deleted" ++ print(db[10]) ++ raise Exception("Row 10 should have been deleted") + except IndexError: + pass + +@@ -365,7 +365,7 @@ def run_test(thread_safe=False): + + addresses = ['Giono','Proust','Mauriac','Gide','Bernanos','Racine', + 'La Fontaine'] +- ks = db.keys() ++ ks = list(db.keys()) + for i in range(50): + x = random.choice(ks) + address = random.choice(addresses) +@@ -397,9 +397,9 @@ def run_test(thread_safe=False): + return item + + h1.set_record_class(DictRecord) +- print '\nrecord_class = DictRecord, h1[0]' +- print h1[0] +- print "\nResident name: %(resident.name)s\nAddress: %(address)s" % h1[0] ++ print('\nrecord_class = DictRecord, h1[0]') ++ print(h1[0]) ++ print("\nResident name: %(resident.name)s\nAddress: %(address)s" % h1[0]) + + if __name__ == "__main__": + run_test(thread_safe = True) +--- buzhug/buzhug.py.orig 2022-03-15 18:59:32 UTC ++++ buzhug/buzhug.py +@@ -88,14 +88,14 @@ Version 1.8 + + import os + import threading +-import cStringIO ++import io + import itertools + import token + import tokenize + import re + import tempfile + import shutil +-import urllib ++import urllib.request, urllib.parse, urllib.error + + import time + from datetime import date,datetime, time as dtime +@@ -106,9 +106,9 @@ try: + except NameError: + from sets import Set as set + +-from buzhug_files import * +-import buzhug_algos +-import buzhug_info ++from .buzhug_files import * ++from . import buzhug_algos ++from . 
import buzhug_info + + version = "1.8" + +@@ -133,12 +133,12 @@ class Record(list): + try: + ix = self.fields.index(k) + except ValueError: +- raise AttributeError,'No attribute named %s' %k ++ raise AttributeError('No attribute named %s' %k) + try: + return self.db.f_decode[self.types[ix]](list.__getitem__(self,ix)) + except: +- print 'error for key %s type %s value %s' %(k,self.types[ix], +- list.__getitem__(self,ix)) ++ print('error for key %s type %s value %s' %(k,self.types[ix], ++ list.__getitem__(self,ix))) + raise + + def __setattr__(self,k,v): +@@ -151,7 +151,7 @@ class Record(list): + def __repr__(self): + elts = [] + for k in self.fields: +- if not isinstance(getattr(self,k),unicode): ++ if not isinstance(getattr(self,k),str): + elts.append('%s:%s' %(k,getattr(self,k))) + else: + elts.append(('%s:%s' %(k,getattr(self,k))).encode('utf-8')) +@@ -190,17 +190,17 @@ class ResultSet(list): + """pretty print""" + col_width = width/len(self.names) + fmt = '%%-%ss' %col_width +- print '|'.join([fmt %name for name in self.names]) +- print '|'.join([fmt %('-'*col_width) for name in self.names]) ++ print('|'.join([fmt %name for name in self.names])) ++ print('|'.join([fmt %('-'*col_width) for name in self.names])) + for rec in self: + line = [] + for name in self.names: + v = fmt %getattr(rec,name) +- if not isinstance(getattr(rec,name),unicode): ++ if not isinstance(getattr(rec,name),str): + line.append(v) + else: + enc = line.append(v.encode('latin-1')) +- print '|'.join(line) ++ print('|'.join(line)) + + def sort_by(self,order): + """order is a string with field names separated by + or - +@@ -208,7 +208,7 @@ class ResultSet(list): + name, ascending surname and descending age""" + + # parse the order string +- e = cStringIO.StringIO(order).readline ++ e = io.StringIO(order).readline + cond = [] + order = '+' + for t in tokenize.generate_tokens(e): +@@ -216,11 +216,11 @@ class ResultSet(list): + ts = t[1] + if tt == 'OP': + if not ts in ['+','-']: +- raise 
SyntaxError,"Bad operator in sort condition: %s" %ts ++ raise SyntaxError("Bad operator in sort condition: %s" %ts) + order = ts + elif tt == 'NAME': + if not ts in self.names: +- raise ValueError,"Unknown sort field :%s" %ts ++ raise ValueError("Unknown sort field :%s" %ts) + cond.append((self.names.index(ts),order)) + # build the function order_func used to sort records + o_f = "def order_func(rec):\n" +@@ -232,7 +232,7 @@ class ResultSet(list): + else: + elts.append("buzhug_algos.rev(rec[%s])" %ix) + o_f += ",".join(elts) +"]" +- exec o_f in globals() # this creates the global function order_func ++ exec(o_f, globals()) # this creates the global function order_func + + # apply the key + try: +@@ -320,7 +320,7 @@ class Base: + + + types_map = [ (int,IntegerFile),(float,FloatFile), +- (str,StringFile),(unicode,UnicodeFile), ++ (str,StringFile),(str,UnicodeFile), + (date,DateFile),(datetime,DateTimeFile), (dtime, TimeFile), + (bool,BooleanFile)] + +@@ -387,14 +387,14 @@ class Base: + elif mode == 'open': + return self.open() + else: +- raise IOError,"Base %s already exists" %self.name ++ raise IOError("Base %s already exists" %self.name) + else: + if mode != 'open': +- raise IOError,"Directory %s already exists" %self.name ++ raise IOError("Directory %s already exists" %self.name) + else: +- raise IOError,"Mode 'open' : " \ ++ raise IOError("Mode 'open' : " \ + "Directory %s already exists but no info file found" \ +- %self.name ++ %self.name) + + self.field_names = [ f[0] for f in fields ] + self.fields = dict([(f[0],f[1]) for f in fields]) +@@ -431,11 +431,11 @@ class Base: + Raise IOError if no base is found for the path entered in __init__ + """ + if not os.path.exists(self.name) or not os.path.isdir(self.name): +- raise IOError,"Base %s doesn't exist" %self.name ++ raise IOError("Base %s doesn't exist" %self.name) + try: + _info = open(self.info_name,'rb') + except IOError: +- raise IOError,"No buzhug base in directory %s" %self.name ++ raise IOError("No 
buzhug base in directory %s" %self.name) + return self._open(_info) + + def _open(self,info): +@@ -445,7 +445,7 @@ class Base: + for (k,v) in fields: + if v.startswith(''): + # reference to an external base +- base_path = urllib.unquote(v[6:]) ++ base_path = urllib.parse.unquote(v[6:]) + ext_db = Base(base_path).open() + self._register_base(ext_db) + self.fields[k] = ext_db +@@ -474,7 +474,7 @@ class Base: + + def close(self): + """Close all files""" +- for f in self._file.values(): ++ for f in list(self._file.values()): + f.close() + self._pos.close() + self._id_pos.close() +@@ -508,19 +508,19 @@ class Base: + Return the identifier of the newly inserted record + """ + if args and kw: +- raise SyntaxError,"Can't use both positional and keyword arguments" ++ raise SyntaxError("Can't use both positional and keyword arguments") + if args: + # insert a list of values ordered like in the base definition + if not len(args) == len(self.field_names)-2: +- raise TypeError,"Expected %s arguments, found %s" \ +- %(len(self.field_names)-2,len(args)) +- return self.insert(**dict(zip(self.field_names[2:],args))) +- if '__id__' in kw.keys(): +- raise NameError,"Specifying the __id__ is not allowed" +- if '__version__' in kw.keys(): +- raise NameError,"Specifying the __version__ is not allowed" ++ raise TypeError("Expected %s arguments, found %s" \ ++ %(len(self.field_names)-2,len(args))) ++ return self.insert(**dict(list(zip(self.field_names[2:],args)))) ++ if '__id__' in list(kw.keys()): ++ raise NameError("Specifying the __id__ is not allowed") ++ if '__version__' in list(kw.keys()): ++ raise NameError("Specifying the __version__ is not allowed") + rec = dict([(f,self.defaults[f]) for f in self.field_names[2:]]) +- for (k,v) in kw.iteritems(): ++ for (k,v) in kw.items(): + self._validate(k,v) + rec[k] = v + # initial version = 0 +@@ -544,19 +544,19 @@ class Base: + - unicode : the format is the encoding + - date, datetime : format = the format string as defined in strftime + 
""" +- if class_ is unicode: ++ if class_ is str: + # test encoding ; will raise LookupError if invalid +- unicode('a').encode(format) ++ str('a').encode(format) + # create the conversion function bytestring -> unicode string + def _from_string(us): +- return unicode(us,format) +- self.from_string[unicode] = _from_string ++ return str(us,format) ++ self.from_string[str] = _from_string + elif class_ is date: + # test date format + d = date(1994,10,7) + t = time.strptime(d.strftime(format),format) + if not t[:3] == d.timetuple()[:3]: +- raise TimeFormatError,'%s is not a valid date format' %format ++ raise TimeFormatError('%s is not a valid date format' %format) + else: + # create the conversion function string -> date + def _from_string(ds): +@@ -567,8 +567,8 @@ class Base: + dt = datetime(1994,10,7,8,30,15) + t = time.strptime(dt.strftime(format),format) + if not t[:6] == dt.timetuple()[:6]: +- raise TimeFormatError,'%s is not a valid datetime format' \ +- %format ++ raise TimeFormatError('%s is not a valid datetime format' \ ++ %format) + else: + # create the conversion function string -> date + def _from_string(dts): +@@ -579,15 +579,15 @@ class Base: + dt = dtime(8,30,15) + t = time.strptime(dt.strftime(format),format) + if not t[3:6] == (dt.hour, dt.minute, dt.second): +- raise TimeFormatError,'%s is not a valid datetime.time format' \ +- %format ++ raise TimeFormatError('%s is not a valid datetime.time format' \ ++ %format) + else: + # create the conversion function string -> dtime + def _from_string(dts): + return dtime(*time.strptime(dts,format)[3:6]) + self.from_string[dtime] = _from_string + else: +- raise ValueError,"Can't specify a format for class %s" %class_ ++ raise ValueError("Can't specify a format for class %s" %class_) + + def insert_as_strings(self,*args,**kw): + """Insert a record with values provided as strings. 
They must be +@@ -595,32 +595,32 @@ class Base: + functions defined in the dictionary from_string + """ + if args and kw: +- raise SyntaxError,"Can't use both positional and keyword arguments" ++ raise SyntaxError("Can't use both positional and keyword arguments") + if args: + # insert a list of strings ordered like in the base definition + if not len(args) == len(self.field_names)-2: +- raise TypeError,"Expected %s arguments, found %s" \ +- %(len(self.field_names)-2,len(args)) +- return self.insert_as_strings(**dict(zip(self.field_names[2:], +- args))) ++ raise TypeError("Expected %s arguments, found %s" \ ++ %(len(self.field_names)-2,len(args))) ++ return self.insert_as_strings(**dict(list(zip(self.field_names[2:], ++ args)))) + return self.insert(**self.apply_types(**kw)) + + def apply_types(self,**kw): + """Transform the strings in kw values to their type + Return a dictionary with the same keys and converted values""" + or_kw = {} +- for k in kw.keys(): ++ for k in list(kw.keys()): + try: + t = self.fields[k] + except KeyError: +- raise NameError,"No field named %s" %k +- if not self.from_string.has_key(t): +- raise Exception,'No string format defined for %s' %t ++ raise NameError("No field named %s" %k) ++ if t not in self.from_string: ++ raise Exception('No string format defined for %s' %t) + else: + try: + or_kw[k] = self.from_string[t](kw[k]) + except: +- raise TypeError,"Can't convert %s into %s" %(kw[k],t) ++ raise TypeError("Can't convert %s into %s" %(kw[k],t)) + return or_kw + + def commit(self): +@@ -710,7 +710,7 @@ class Base: + # only field 'name' set + """ + res,names = self._select(names,request,**args) +- return ResultSet(names,res.values()) ++ return ResultSet(names,list(res.values())) + + def select_for_update(self,names=None,request=None,**args): + """Same syntax as select, only checks that the field __version__ +@@ -726,7 +726,7 @@ class Base: + else: + names += [ f for f in ['__id__','__version__'] if not f in names ] + res,names = 
self._select(names,request,**args) +- return ResultSet(names,res.values()) ++ return ResultSet(names,list(res.values())) + + def __call__(self,**kw): + return self.select_for_update(**kw) +@@ -745,7 +745,7 @@ class Base: + _names = self.field_names + + _namespace = {} +- if args.has_key('_namespace'): ++ if '_namespace' in args: + _namespace = args['_namespace'] + del args['_namespace'] + +@@ -755,7 +755,7 @@ class Base: + # the return value of match and search applied to the string + # stripped from its first and last character + regexps = [] +- for k,v in args.iteritems(): ++ for k,v in args.items(): + if type(v) is REGEXPTYPE: + _namespace[k] = Pattern(v) + regexps.append(k) +@@ -765,14 +765,14 @@ class Base: + del args[k] + + if _request is None: +- f_args = [ k for k in args.keys() ++ f_args = [ k for k in list(args.keys()) + if hasattr(self._file[k],'block_len') ] + # if there is at least one fixed length field to search, use the + # fast_select algorithm + if f_args: + res,names = buzhug_algos.fast_select(self,_names,**args) + _Record = makeRecordClass(self,self.record_class,names) +- for k in res.keys(): ++ for k in list(res.keys()): + res[k] = _Record(res[k]) + return res,names + conds = [] +@@ -780,17 +780,17 @@ class Base: + conds.append('%s == _c[%s]' %(k,i)) + _request = ' and '.join(conds) + _c = [] +- for (k,v) in args.iteritems(): ++ for (k,v) in args.items(): + t = self.fields[k] # field type + if isinstance(v,(tuple,list)): + _c.append([self.f_encode[t](x) for x in v]) + else: + _c.append(self.f_encode[t](v)) +- for n in args.keys(): ++ for n in list(args.keys()): + if not n in _names: + _names.append(n) + else: +- for (k,v) in args.iteritems(): ++ for (k,v) in args.items(): + if isinstance(v,Record): + # comparison with a record of another base + ft = self.file_types[self.types[v.db.name]] +@@ -832,7 +832,7 @@ class Base: + args.update(_namespace) + + # execute the loop +- exec loop in locals(),args ++ exec(loop, locals(),args) + + # exclude 
deleted rows from the results + if self._del_rows.deleted_rows: +@@ -854,11 +854,11 @@ class Base: + self.update(rec,**kw) + return + only_fixed_length = True +- if '__id__' in kw.keys(): +- raise NameError,"Can't update __id__" +- if '__version__' in kw.keys(): +- raise NameError,"Can't update __version__" +- for (k,v) in kw.iteritems(): ++ if '__id__' in list(kw.keys()): ++ raise NameError("Can't update __id__") ++ if '__version__' in list(kw.keys()): ++ raise NameError("Can't update __version__") ++ for (k,v) in kw.items(): + self._validate(k,v) + setattr(record,k,v) + if not hasattr(self.file_types[self.fields[k]], +@@ -867,7 +867,7 @@ class Base: + + if not hasattr(record,'__id__') or not hasattr(record,'__version__'): + # refuse to update a record that was not selected for update +- raise UpdateError,'The record was not selected for update' ++ raise UpdateError('The record was not selected for update') + + _id = record.__id__ + # line number of the record in position file +@@ -878,7 +878,7 @@ class Base: + # file is not the same, refuse to update + current_version = self[_id].__version__ + if not record.__version__ == current_version: +- raise ConflictError,'The record has changed since selection' ++ raise ConflictError('The record has changed since selection') + + # increment version + record.__version__ += 1 +@@ -888,7 +888,7 @@ class Base: + if only_fixed_length: + # only fixed length fields modified : just change the values + kw['__version__'] = record.__version__ +- for k,v in kw.iteritems(): ++ for k,v in kw.items(): + ix = self.field_names.index(k) + self._file[k].write_value_at_pos(field_pos[ix],v) + else: +@@ -926,7 +926,7 @@ class Base: + """Add a new field after the specified field, or in the beginning if + no field is specified""" + if field_name in self.field_names: +- raise NameError,"Field %s already exists" %field_name ++ raise NameError("Field %s already exists" %field_name) + field_def = [field_name,field_type] + if default is not None: + 
field_def.append(default) +@@ -937,7 +937,7 @@ class Base: + if after is None: + indx = 2 # insert after __version__ + elif not after in self.field_names: +- raise NameError,"No field named %s" %after ++ raise NameError("No field named %s" %after) + else: + indx = 1+self.field_names.index(after) + self.field_names.insert(indx,field_name) +@@ -955,11 +955,11 @@ class Base: + def drop_field(self,field_name): + """Remove the specified field name""" + if not field_name in self.field_names: +- raise NameError,"No field named %s" %field_name ++ raise NameError("No field named %s" %field_name) + if field_name == '__id__': +- raise ValueError,"Field __id__ can't be removed" ++ raise ValueError("Field __id__ can't be removed") + if field_name == '__version__': +- raise ValueError,"Field __version__ can't be removed" ++ raise ValueError("Field __version__ can't be removed") + indx = self.field_names.index(field_name) + self.field_names.remove(field_name) + del self.defaults[field_name] +@@ -972,8 +972,8 @@ class Base: + + def _validate(self,k,v): + """Validate the couple key,value""" +- if not k in self.fields.keys(): +- raise NameError,"No field named %s" %k ++ if not k in list(self.fields.keys()): ++ raise NameError("No field named %s" %k) + if v is None: + return + # if self.fields[k] is an instance of Base, the value must be an +@@ -981,21 +981,21 @@ class Base: + # db == self.fields[k] + if isinstance(self.fields[k],Base): + if not issubclass(v.__class__,Record): +- raise TypeError,"Bad type for %s : expected %s, got %s %s" \ +- %(k,self.fields[k],v,v.__class__) ++ raise TypeError("Bad type for %s : expected %s, got %s %s" \ ++ %(k,self.fields[k],v,v.__class__)) + if v.__class__.db.name != self.fields[k].name: +- raise TypeError,"Bad base for %s : expected %s, got %s" \ +- %(k,self.fields[k].name,v.__class__.db.name) ++ raise TypeError("Bad base for %s : expected %s, got %s" \ ++ %(k,self.fields[k].name,v.__class__.db.name)) + else: + if not 
isinstance(v,self.fields[k]): +- raise TypeError,"Bad type for %s : expected %s, got %s %s" \ +- %(k,self.fields[k],v,v.__class__) ++ raise TypeError("Bad type for %s : expected %s, got %s %s" \ ++ %(k,self.fields[k],v,v.__class__)) + + def _iterate(self,*names): + """_iterate on the specified names only""" + Record = makeRecordClass(self,self.record_class,names) + files = [ self._file[f] for f in names ] +- for record in itertools.izip(*files): ++ for record in zip(*files): + yield Record(record) + + def __getitem__(self,num): +@@ -1005,7 +1005,7 @@ class Base: + # first find the line in position file + block_pos = self._id_pos.get_block_at_pos(5*num) + if block_pos[0] == '#': +- raise IndexError,'No item at position %s' %num ++ raise IndexError('No item at position %s' %num) + else: + _id_pos = self._id_pos.from_block(block_pos) + # block in position file +@@ -1031,7 +1031,7 @@ class Base: + return True + + def __contains__(self,num): +- return self.has_key(num) ++ return num in self + + def keys(self): + return [ r.__id__ for r in self.select(['__id__']) ] +@@ -1042,7 +1042,7 @@ class Base: + remove the test record[0][0] != "#" + """ + files = [ self._file[f] for f in self.field_names ] +- for record in itertools.izip(*files): ++ for record in zip(*files): + if record[0][0] != "#": + r = self._full_rec(record) + yield r +--- buzhug/conversion_float.py.orig 2022-03-15 18:59:32 UTC ++++ buzhug/conversion_float.py +@@ -51,8 +51,8 @@ class FloatFile: + if value is None: + return '!'+chr(0)*9 + elif not isinstance(value,float): +- raise ValueError,'Bad type : expected float, got %s %s' \ +- %(value,value.__class__) ++ raise ValueError('Bad type : expected float, got %s %s' \ ++ %(value,value.__class__)) + else: + # get mantissa and exponent + # f = mant*2**exp, 0.5 <= abs(mant) < 1 +@@ -82,7 +82,7 @@ def conv(old): + # update base to new version + of = OldFloatFile() + nf = FloatFile() +- for (f,t) in old.fields.iteritems(): ++ for (f,t) in old.fields.items(): + if t 
is float: + old_path = db._file[f].path + new_path = os.path.join(db._file[f].base,"new_"+db._file[f].name) +@@ -94,10 +94,10 @@ def conv(old): + else: + new_block = nf.to_block(v) + if nf.from_block(new_block) != v: +- raise ValueError,"conversion error : %s != %s" \ +- %(v,nf.from_block(new_block)) ++ raise ValueError("conversion error : %s != %s" \ ++ %(v,nf.from_block(new_block))) + new_file.write(new_block) +- print i,"lines" ++ print(i,"lines") + new_file.close() + + # double-check if values are the same between old and new file +@@ -110,8 +110,8 @@ def conv(old): + break + new = new_file.read(bl) + if not of.from_block(old) == nf.from_block(new): +- raise ValueError, "conversion error : %s != %s" \ +- %(of.from_block(old),nf.from_block(new)) ++ raise ValueError("conversion error : %s != %s" \ ++ %(of.from_block(old),nf.from_block(new))) + + new_file.close() + # replace old file +@@ -122,10 +122,10 @@ def conv(old): + os.rename(db._file[f].path,os.path.join(db._file[f].base,backup_name)) + os.rename(new_path,old_path) + +-import buzhug +-import tkFileDialog ++from . 
import buzhug ++import tkinter.filedialog + +-path = tkFileDialog.askdirectory() ++path = tkinter.filedialog.askdirectory() + if path : + db = buzhug.Base(path).open() + conv(db) diff --git a/databases/evolution-data-server/Makefile b/databases/evolution-data-server/Makefile index 5ce4ce241f6..a23e65f12a3 100644 --- a/databases/evolution-data-server/Makefile +++ b/databases/evolution-data-server/Makefile @@ -2,6 +2,7 @@ PORTNAME= evolution-data-server DISTVERSION= 3.42.4 +PORTREVISION= 1 CATEGORIES= databases gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/databases/gmdb2/Makefile b/databases/gmdb2/Makefile index d636d09af3e..a91c097d497 100644 --- a/databases/gmdb2/Makefile +++ b/databases/gmdb2/Makefile @@ -3,6 +3,7 @@ PORTNAME= gmdb2 DISTVERSIONPREFIX= v DISTVERSION= 0.9.1 +PORTREVISION= 1 CATEGORIES= databases MAINTAINER= rhurlin@FreeBSD.org diff --git a/databases/kbibtex/Makefile b/databases/kbibtex/Makefile index a5234b987cf..21bad3e8757 100644 --- a/databases/kbibtex/Makefile +++ b/databases/kbibtex/Makefile @@ -2,7 +2,7 @@ PORTNAME= kbibtex DISTVERSION= 0.9.2 -PORTREVISION= 24 +PORTREVISION= 25 CATEGORIES= databases kde MASTER_SITES= KDE/stable/KBibTeX/${DISTVERSION} diff --git a/databases/libgda5-bdb/Makefile b/databases/libgda5-bdb/Makefile index 78fc3958368..9ce77ef15b7 100644 --- a/databases/libgda5-bdb/Makefile +++ b/databases/libgda5-bdb/Makefile @@ -1,6 +1,6 @@ # Created by: Joe Marcus Clarke -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases gnome PKGNAMESUFFIX= 5-bdb diff --git a/databases/libgda5-jdbc/Makefile b/databases/libgda5-jdbc/Makefile index 16d913424fc..e1df8fbd2bb 100644 --- a/databases/libgda5-jdbc/Makefile +++ b/databases/libgda5-jdbc/Makefile @@ -1,6 +1,6 @@ # Created by: Koop Mast -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases gnome PKGNAMESUFFIX= 5-jdbc diff --git a/databases/libgda5-ldap/Makefile b/databases/libgda5-ldap/Makefile index edc698a3a48..2246fff3923 100644 --- a/databases/libgda5-ldap/Makefile +++ 
b/databases/libgda5-ldap/Makefile @@ -1,6 +1,6 @@ # Created by: Joe Marcus Clarke -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= databases gnome PKGNAMESUFFIX= 5-ldap diff --git a/databases/libgda5-mdb/Makefile b/databases/libgda5-mdb/Makefile index 84c6aa0defd..63aac9119b7 100644 --- a/databases/libgda5-mdb/Makefile +++ b/databases/libgda5-mdb/Makefile @@ -1,6 +1,6 @@ # Created by: Joe Marcus Clarke -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= databases gnome PKGNAMESUFFIX= 5-mdb diff --git a/databases/libgda5-mysql/Makefile b/databases/libgda5-mysql/Makefile index 748e6152a45..bb67e7053ee 100644 --- a/databases/libgda5-mysql/Makefile +++ b/databases/libgda5-mysql/Makefile @@ -1,6 +1,6 @@ # Created by: Joe Marcus Clarke -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases gnome PKGNAMESUFFIX= 5-mysql diff --git a/databases/libgda5-postgresql/Makefile b/databases/libgda5-postgresql/Makefile index 59860c6fc6d..66bb087ad44 100644 --- a/databases/libgda5-postgresql/Makefile +++ b/databases/libgda5-postgresql/Makefile @@ -1,6 +1,6 @@ # Created by: Joe Marcus Clarke -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases gnome PKGNAMESUFFIX= 5-postgresql diff --git a/databases/libgda5-ui/Makefile b/databases/libgda5-ui/Makefile index 1793568f71d..4c5879b1783 100644 --- a/databases/libgda5-ui/Makefile +++ b/databases/libgda5-ui/Makefile @@ -1,6 +1,6 @@ # Created by: Joe Marcus Clarke -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases gnome PKGNAMESUFFIX= 5-ui diff --git a/databases/libgda5/Makefile b/databases/libgda5/Makefile index 930b453dde1..320f1c9edb3 100644 --- a/databases/libgda5/Makefile +++ b/databases/libgda5/Makefile @@ -2,6 +2,7 @@ PORTNAME= libgda DISTVERSION?= 5.2.10 +PORTREVISION= 1 CATEGORIES= databases gnome MASTER_SITES= GNOME PKGNAMESUFFIX?= 5 diff --git a/databases/libgdamm5/Makefile b/databases/libgdamm5/Makefile index 5e08ac3b26a..88fa643f188 100644 --- a/databases/libgdamm5/Makefile +++ b/databases/libgdamm5/Makefile @@ -2,6 +2,7 @@ 
PORTNAME= libgdamm PORTVERSION= 4.99.11 +PORTREVISION= 1 CATEGORIES= databases gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/databases/mariadb103-server/Makefile b/databases/mariadb103-server/Makefile index 7f25d0c246c..cfc2190caf5 100644 --- a/databases/mariadb103-server/Makefile +++ b/databases/mariadb103-server/Makefile @@ -2,7 +2,7 @@ PORTNAME?= mariadb PORTVERSION= 10.3.34 -PORTREVISION?= 1 +PORTREVISION?= 2 CATEGORIES= databases MASTER_SITES= http://ftp.nluug.nl/db/${SITESDIR} \ http://mirror2.hs-esslingen.de/pub/Mirrors/${SITESDIR}/ \ diff --git a/databases/mariadb104-server/Makefile b/databases/mariadb104-server/Makefile index c6bca66f257..52466169a4b 100644 --- a/databases/mariadb104-server/Makefile +++ b/databases/mariadb104-server/Makefile @@ -2,7 +2,7 @@ PORTNAME?= mariadb PORTVERSION= 10.4.24 -PORTREVISION?= 1 +PORTREVISION?= 2 CATEGORIES= databases MASTER_SITES= http://ftp.nluug.nl/db/${SITESDIR}/ \ http://mirror2.hs-esslingen.de/pub/Mirrors/${SITESDIR}/ \ diff --git a/databases/mariadb105-server/Makefile b/databases/mariadb105-server/Makefile index f998b0644d0..ace709b11ac 100644 --- a/databases/mariadb105-server/Makefile +++ b/databases/mariadb105-server/Makefile @@ -2,7 +2,7 @@ PORTNAME?= mariadb PORTVERSION= 10.5.15 -PORTREVISION?= 1 +PORTREVISION?= 2 CATEGORIES= databases MASTER_SITES= http://mirror2.hs-esslingen.de/pub/Mirrors/${SITESDIR}/ \ http://gd.tuwien.ac.at/db/${SITESDIR}/ \ diff --git a/databases/mariadb106-server/Makefile b/databases/mariadb106-server/Makefile index 5adaf88e465..5308d85a63e 100644 --- a/databases/mariadb106-server/Makefile +++ b/databases/mariadb106-server/Makefile @@ -2,7 +2,7 @@ PORTNAME?= mariadb PORTVERSION= 10.6.7 -PORTREVISION?= 1 +PORTREVISION?= 2 CATEGORIES= databases MASTER_SITES= http://mirrors.supportex.net/${SITESDIR}/ \ http://mirror2.hs-esslingen.de/pub/Mirrors/${SITESDIR}/ \ diff --git a/databases/mydumper/Makefile b/databases/mydumper/Makefile index 0c612ee314a..3d5fed1de50 100644 --- 
a/databases/mydumper/Makefile +++ b/databases/mydumper/Makefile @@ -1,7 +1,7 @@ # Created by: Gea-Suan Lin PORTNAME= mydumper -DISTVERSION= 0.11.5-2 +PORTVERSION= 0.12.1 DISTVERSIONPREFIX= v CATEGORIES= databases @@ -21,7 +21,6 @@ CMAKE_OFF= BUILD_DOCS WITH_BINLOG PLIST_FILES= sbin/mydumper sbin/myloader -GH_ACCOUNT= maxbube USE_GITHUB= yes post-patch: diff --git a/databases/mydumper/distinfo b/databases/mydumper/distinfo index b72e4a10fdb..c3a7513c56c 100644 --- a/databases/mydumper/distinfo +++ b/databases/mydumper/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643970972 -SHA256 (maxbube-mydumper-v0.11.5-2_GH0.tar.gz) = 5ca8fbca52abaf42d0c21eca2a182772a958709b3b150bfd3c10a47496591727 -SIZE (maxbube-mydumper-v0.11.5-2_GH0.tar.gz) = 110088 +TIMESTAMP = 1647264420 +SHA256 (mydumper-mydumper-v0.12.1_GH0.tar.gz) = f3c8ae09573d9a37512984cff24ade1cd87b50ae772944ef57d5bd1d5fac8e5b +SIZE (mydumper-mydumper-v0.12.1_GH0.tar.gz) = 114268 diff --git a/databases/mydumper/pkg-descr b/databases/mydumper/pkg-descr index 0384e69ae42..2834898e1de 100644 --- a/databases/mydumper/pkg-descr +++ b/databases/mydumper/pkg-descr @@ -1,12 +1,16 @@ -== What is mydumper? Why? == +MyDumper is a MySQL Logical Backup Tool. It has 2 tools: +- mydumper which is responsible to export a consistent backup of MySQL databases +- myloader reads the backup from mydumper, connects the to destination database + and imports the backup. Both tools use multithreading capabilities -* Parallelism (hence, speed) and performance (avoids expensive character set +Why do we need MyDumper? 
+- Parallelism (hence, speed) and performance (avoids expensive character set conversion routines, efficient code overall) -* Easier to manage output (separate files for tables, dump metadata, etc, easy +- Easier to manage output (separate files for tables, dump metadata, etc, easy to view/parse data) -* Consistency - maintains snapshot across all threads, provides accurate master +- Consistency - maintains snapshot across all threads, provides accurate master and slave log positions, etc -* Manageability - supports PCRE for specifying database and tables inclusions +- Manageability - supports PCRE for specifying database and tables inclusions and exclusions -WWW: https://github.com/maxbube/mydumper +WWW: https://github.com/mydumper/mydumper diff --git a/databases/p5-Search-Xapian/Makefile b/databases/p5-Search-Xapian/Makefile index b89ec32e660..208d9c37f0e 100644 --- a/databases/p5-Search-Xapian/Makefile +++ b/databases/p5-Search-Xapian/Makefile @@ -1,7 +1,7 @@ # Created by: Lars Balker Rasmussen PORTNAME= Search-Xapian -PORTVERSION= 1.2.25.4 +PORTVERSION= 1.2.25.5 CATEGORIES= databases perl5 MASTER_SITES= https://oligarchy.co.uk/xapian/${PORTVERSION:R}/ \ LOCAL/sunpoet \ diff --git a/databases/p5-Search-Xapian/distinfo b/databases/p5-Search-Xapian/distinfo index 4c503355261..d11c8b1fe5d 100644 --- a/databases/p5-Search-Xapian/distinfo +++ b/databases/p5-Search-Xapian/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1605901620 -SHA256 (Search-Xapian-1.2.25.4.tar.gz) = 871943199b80efd98e31f4b4711bb070a576c91be692193d8a43affad64574dd -SIZE (Search-Xapian-1.2.25.4.tar.gz) = 75336 +TIMESTAMP = 1647264452 +SHA256 (Search-Xapian-1.2.25.5.tar.gz) = 204fbdc712d6711ffab668c1f4cfc007b639a9fb64ad7e19cb20fc10a910ba8b +SIZE (Search-Xapian-1.2.25.5.tar.gz) = 74960 diff --git a/databases/pg_citus/Makefile b/databases/pg_citus/Makefile index 1f440803605..0afd73d5561 100644 --- a/databases/pg_citus/Makefile +++ b/databases/pg_citus/Makefile @@ -1,7 +1,7 @@ # Created by: Matthew Seaman 
PORTNAME= citus -PORTVERSION= 10.2.4 +PORTVERSION= 10.2.5 DISTVERSIONPREFIX= v CATEGORIES= databases PKGNAMEPREFIX= pg_ diff --git a/databases/pg_citus/distinfo b/databases/pg_citus/distinfo index ba52ccf83a0..0bb74139901 100644 --- a/databases/pg_citus/distinfo +++ b/databases/pg_citus/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1644068560 -SHA256 (citusdata-citus-v10.2.4_GH0.tar.gz) = bfe62893ad8b571737b38e9b6bca35650fdf8f89ce9da3996d6634090c97db7e -SIZE (citusdata-citus-v10.2.4_GH0.tar.gz) = 5535944 +TIMESTAMP = 1648305653 +SHA256 (citusdata-citus-v10.2.5_GH0.tar.gz) = 748beaf219163468f0b92bf5315798457f9859a6cd9069a7fd03208d8d231176 +SIZE (citusdata-citus-v10.2.5_GH0.tar.gz) = 5538309 diff --git a/databases/pgadmin3/Makefile b/databases/pgadmin3/Makefile index d6786b03d16..801854a09f0 100644 --- a/databases/pgadmin3/Makefile +++ b/databases/pgadmin3/Makefile @@ -2,7 +2,7 @@ PORTNAME= pgadmin3 PORTVERSION= 1.22.2 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= databases MASTER_SITES= PGSQL/pgadmin/pgadmin3/v${PORTVERSION}/src DIST_SUBDIR= postgresql diff --git a/databases/pgbackrest/Makefile b/databases/pgbackrest/Makefile index 8dd52569bbe..32db3b27319 100644 --- a/databases/pgbackrest/Makefile +++ b/databases/pgbackrest/Makefile @@ -1,5 +1,6 @@ PORTNAME= pgbackrest DISTVERSION= 2.35 +PORTREVISION= 1 CATEGORIES= databases MAINTAINER= schoutm@gmail.com diff --git a/databases/pgbarman/Makefile b/databases/pgbarman/Makefile index 5f0a29d74dc..1ceeb4ff851 100644 --- a/databases/pgbarman/Makefile +++ b/databases/pgbarman/Makefile @@ -2,7 +2,7 @@ PORTNAME= barman DISTVERSIONPREFIX= release/ -DISTVERSION= 2.18 +DISTVERSION= 2.19 CATEGORIES= databases PKGNAMEPREFIX= pg PKGNAMESUFFIX= ${PYTHON_PKGNAMESUFFIX} diff --git a/databases/pgbarman/distinfo b/databases/pgbarman/distinfo index a18cfbc73bb..c1330a4c8a0 100644 --- a/databases/pgbarman/distinfo +++ b/databases/pgbarman/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1643228211 -SHA256 (barman-2.18-manual.pdf) = 
89ab4403e1a780204b8238a663cc73186bbccf78b0a6e062c06d474a9a2595b6 -SIZE (barman-2.18-manual.pdf) = 1343143 -SHA256 (EnterpriseDB-barman-release-2.18_GH0.tar.gz) = 9c2cf5f126e56ef9e7497df4c5a4c056df50b33aef19c0565807eb6c30da8f92 -SIZE (EnterpriseDB-barman-release-2.18_GH0.tar.gz) = 1412704 +TIMESTAMP = 1648305612 +SHA256 (barman-2.19-manual.pdf) = 92be4191ebb4f5f58cdec093929471d8bdb3358ae5fe475676a26434eb2af669 +SIZE (barman-2.19-manual.pdf) = 1347694 +SHA256 (EnterpriseDB-barman-release-2.19_GH0.tar.gz) = d153135a1a1581a59e851a4c1bca95a1934eececda5202bccddfa333926a1e25 +SIZE (EnterpriseDB-barman-release-2.19_GH0.tar.gz) = 1423229 diff --git a/databases/pgmodeler/Makefile b/databases/pgmodeler/Makefile index 20f7793f693..e472b9cf172 100644 --- a/databases/pgmodeler/Makefile +++ b/databases/pgmodeler/Makefile @@ -2,6 +2,7 @@ PORTNAME= pgmodeler PORTVERSION= 0.9.4 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= databases diff --git a/databases/pointcloud/Makefile b/databases/pointcloud/Makefile index 435d6b8b85c..3c205cd68e8 100644 --- a/databases/pointcloud/Makefile +++ b/databases/pointcloud/Makefile @@ -2,7 +2,7 @@ PORTNAME= pointcloud PORTVERSION= 1.2.1 -PORTREVISION= 2 +PORTREVISION= 3 DISTVERSIONPREFIX= v CATEGORIES= databases geography diff --git a/databases/postgis30/Makefile b/databases/postgis30/Makefile index 0e986708fa5..c80d7f772e5 100644 --- a/databases/postgis30/Makefile +++ b/databases/postgis30/Makefile @@ -2,6 +2,7 @@ PORTNAME= postgis PORTVERSION= 3.0.5 +PORTREVISION= 1 CATEGORIES= databases geography MASTER_SITES= https://download.osgeo.org/postgis/source/ PKGNAMESUFFIX= 30 diff --git a/databases/postgis31/Makefile b/databases/postgis31/Makefile index b2adf0d8cf2..281be0c95af 100644 --- a/databases/postgis31/Makefile +++ b/databases/postgis31/Makefile @@ -1,5 +1,6 @@ PORTNAME= postgis PORTVERSION= 3.1.5 +PORTREVISION= 1 CATEGORIES= databases geography MASTER_SITES= https://download.osgeo.org/postgis/source/ PKGNAMESUFFIX= 31 diff --git 
a/databases/postgis32/Makefile b/databases/postgis32/Makefile index 19961c68653..b4ce87b32dc 100644 --- a/databases/postgis32/Makefile +++ b/databases/postgis32/Makefile @@ -1,5 +1,6 @@ PORTNAME= postgis DISTVERSION= 3.2.1 +PORTREVISION= 1 CATEGORIES= databases geography MASTER_SITES= https://download.osgeo.org/postgis/source/ PKGNAMESUFFIX= 32 diff --git a/databases/postgresql10-contrib/Makefile b/databases/postgresql10-contrib/Makefile index 099c55e452f..49a48451234 100644 --- a/databases/postgresql10-contrib/Makefile +++ b/databases/postgresql10-contrib/Makefile @@ -1,7 +1,7 @@ # Created by: Palle Girgensohn PORTNAME= postgresql -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases MAINTAINER= pgsql@FreeBSD.org diff --git a/databases/postgresql11-contrib/Makefile b/databases/postgresql11-contrib/Makefile index 32385d215e0..29b0b3f4915 100644 --- a/databases/postgresql11-contrib/Makefile +++ b/databases/postgresql11-contrib/Makefile @@ -1,7 +1,7 @@ # Created by: Palle Girgensohn PORTNAME= postgresql -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases MAINTAINER= pgsql@FreeBSD.org diff --git a/databases/postgresql12-contrib/Makefile b/databases/postgresql12-contrib/Makefile index 9f992af4843..b8f7483964a 100644 --- a/databases/postgresql12-contrib/Makefile +++ b/databases/postgresql12-contrib/Makefile @@ -1,7 +1,7 @@ # Created by: Palle Girgensohn PORTNAME= postgresql -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases MAINTAINER= pgsql@FreeBSD.org diff --git a/databases/postgresql13-contrib/Makefile b/databases/postgresql13-contrib/Makefile index 433ee810d61..1b56472b1af 100644 --- a/databases/postgresql13-contrib/Makefile +++ b/databases/postgresql13-contrib/Makefile @@ -1,7 +1,7 @@ # Created by: Palle Girgensohn PORTNAME= postgresql -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases MAINTAINER= pgsql@FreeBSD.org diff --git a/databases/postgresql14-contrib/Makefile b/databases/postgresql14-contrib/Makefile index 1506d473094..d4811d4268d 100644 --- 
a/databases/postgresql14-contrib/Makefile +++ b/databases/postgresql14-contrib/Makefile @@ -1,7 +1,7 @@ # Created by: Palle Girgensohn PORTNAME= postgresql -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= databases MAINTAINER= pgsql@FreeBSD.org diff --git a/databases/py-Elixir/files/patch-2to3 b/databases/py-Elixir/files/patch-2to3 new file mode 100644 index 00000000000..ab5974dac8a --- /dev/null +++ b/databases/py-Elixir/files/patch-2to3 @@ -0,0 +1,190 @@ +--- elixir/entity.py.orig 2009-11-13 19:50:38 UTC ++++ elixir/entity.py +@@ -3,7 +3,7 @@ This module provides the ``Entity`` base class, as wel + ``EntityMeta``. + ''' + +-from py23compat import sorted ++from .py23compat import sorted + + import sys + import types +@@ -172,7 +172,7 @@ class EntityDescriptor(object): + self.identity = self.identity(entity) + + if self.polymorphic: +- if not isinstance(self.polymorphic, basestring): ++ if not isinstance(self.polymorphic, str): + self.polymorphic = options.DEFAULT_POLYMORPHIC_COL_NAME + + #--------------------- +@@ -226,7 +226,7 @@ class EntityDescriptor(object): + if col.primary_key: + self.add_column(col.copy()) + elif not self.has_pk and self.auto_primarykey: +- if isinstance(self.auto_primarykey, basestring): ++ if isinstance(self.auto_primarykey, str): + colname = self.auto_primarykey + else: + colname = options.DEFAULT_AUTO_PRIMARYKEY_NAME +@@ -298,7 +298,7 @@ class EntityDescriptor(object): + options.POLYMORPHIC_COL_TYPE)) + + if self.version_id_col: +- if not isinstance(self.version_id_col, basestring): ++ if not isinstance(self.version_id_col, str): + self.version_id_col = options.DEFAULT_VERSION_ID_COL_NAME + self.add_column(Column(self.version_id_col, Integer)) + +@@ -306,7 +306,7 @@ class EntityDescriptor(object): + self.entity.table = Table(self.tablename, self.metadata, + *args, **kwargs) + if DEBUG: +- print self.entity.table.repr2() ++ print(self.entity.table.repr2()) + + def setup_reltables(self): + self.call_builders('create_tables') +@@ -365,7 
+365,7 @@ class EntityDescriptor(object): + return children + + def translate_order_by(self, order_by): +- if isinstance(order_by, basestring): ++ if isinstance(order_by, str): + order_by = [order_by] + + order = [] +@@ -505,12 +505,12 @@ class EntityDescriptor(object): + # get one in any case. + table = type.__getattribute__(self.entity, 'table') + if table is not None: +- if check_duplicate and col.key in table.columns.keys(): ++ if check_duplicate and col.key in list(table.columns.keys()): + raise Exception("Column '%s' already exist in table '%s' ! " % + (col.key, table.name)) + table.append_column(col) + if DEBUG: +- print "table.append_column(%s)" % col ++ print("table.append_column(%s)" % col) + + def add_constraint(self, constraint): + self.constraints.append(constraint) +@@ -537,7 +537,7 @@ class EntityDescriptor(object): + if mapper: + mapper.add_property(name, property) + if DEBUG: +- print "mapper.add_property('%s', %s)" % (name, repr(property)) ++ print("mapper.add_property('%s', %s)" % (name, repr(property))) + + def add_mapper_extension(self, extension): + extensions = self.mapper_options.get('extension', []) +@@ -795,7 +795,7 @@ def instrument_class(cls): + + # Process attributes (using the assignment syntax), looking for + # 'Property' instances and attaching them to this entity. +- properties = [(name, attr) for name, attr in cls.__dict__.iteritems() ++ properties = [(name, attr) for name, attr in cls.__dict__.items() + if isinstance(attr, Property)] + sorted_props = sorted(base_props + properties, + key=lambda i: i[1]._counter) +@@ -924,7 +924,7 @@ def setup_entities(entities): + # delete all Elixir properties so that it doesn't interfere with + # SQLAlchemy. At this point they should have be converted to + # builders. 
+- for name, attr in entity.__dict__.items(): ++ for name, attr in list(entity.__dict__.items()): + if isinstance(attr, Property): + delattr(entity, name) + +@@ -1004,7 +1004,7 @@ class EntityBase(object): + self.set(**kwargs) + + def set(self, **kwargs): +- for key, value in kwargs.iteritems(): ++ for key, value in kwargs.items(): + setattr(self, key, value) + + def update_or_create(cls, data, surrogate=True): +@@ -1038,7 +1038,7 @@ class EntityBase(object): + + mapper = sqlalchemy.orm.object_mapper(self) + +- for key, value in data.iteritems(): ++ for key, value in data.items(): + if isinstance(value, dict): + dbvalue = getattr(self, key) + rel_class = mapper.get_property(key).mapper.class_ +@@ -1074,7 +1074,7 @@ class EntityBase(object): + if isinstance(p, ColumnProperty)] + data = dict([(name, getattr(self, name)) + for name in col_prop_names if name not in exclude]) +- for rname, rdeep in deep.iteritems(): ++ for rname, rdeep in deep.items(): + dbdata = getattr(self, rname) + #FIXME: use attribute names (ie coltoprop) instead of column names + fks = self.mapper.get_property(rname).remote_side +@@ -1145,7 +1145,7 @@ class EntityBase(object): + get = classmethod(get) + + +-class Entity(EntityBase): ++class Entity(EntityBase, metaclass=EntityMeta): + ''' + The base class for all entities + +@@ -1167,6 +1167,5 @@ class Entity(EntityBase): + For further information, please refer to the provided examples or + tutorial. 
+ ''' +- __metaclass__ = EntityMeta + + +--- elixir/py23compat.py.orig 2009-10-02 10:19:50 UTC ++++ elixir/py23compat.py +@@ -11,7 +11,7 @@ orig_cmp = cmp + def sort_list(l, cmp=None, key=None, reverse=False): + try: + l.sort(cmp, key, reverse) +- except TypeError, e: ++ except TypeError as e: + if not str(e).startswith('sort expected at most 1 arguments'): + raise + if cmp is None: +--- elixir/relationships.py.orig 2009-11-13 20:04:26 UTC ++++ elixir/relationships.py +@@ -412,7 +412,7 @@ from sqlalchemy import ForeignKeyConstraint, Column, T + from sqlalchemy.orm import relation, backref, class_mapper + from sqlalchemy.ext.associationproxy import association_proxy + +-import options ++from . import options + from elixir.statements import ClassMutator + from elixir.properties import Property + from elixir.entity import EntityMeta, DEBUG +@@ -495,7 +495,7 @@ class Relationship(Property): + + def target(self): + if not self._target: +- if isinstance(self.of_kind, basestring): ++ if isinstance(self.of_kind, str): + collection = self.entity._descriptor.collection + self._target = collection.resolve(self.of_kind, self.entity) + else: +@@ -1115,7 +1115,7 @@ class ManyToMany(Relationship): + self.table = Table(tablename, e1_desc.metadata, + schema=schema, *args, **complete_kwargs) + if DEBUG: +- print self.table.repr2() ++ print(self.table.repr2()) + + def _build_join_clauses(self): + # In the case we have a self-reference, we need to build join clauses +@@ -1222,7 +1222,7 @@ def _get_join_clauses(local_table, local_cols1, local_ + # match. + + #TODO: rewrite this. Even with the comment, I don't even understand it myself. 
+- for cols, constraint in constraint_map.iteritems(): ++ for cols, constraint in constraint_map.items(): + if cols == cols1 or (cols != cols2 and + not cols1 and (cols2 in constraint_map or + cols2 is None)): diff --git a/databases/py-dbf/Makefile b/databases/py-dbf/Makefile index 787ab8e1fd9..fa696d6effc 100644 --- a/databases/py-dbf/Makefile +++ b/databases/py-dbf/Makefile @@ -11,7 +11,7 @@ COMMENT= Pure python package for reading/writing dbf files LICENSE= BSD3CLAUSE -USES= python:3.6+ +USES= dos2unix python:3.6+ USE_PYTHON= autoplist distutils NO_ARCH= yes diff --git a/databases/py-dbf/files/patch-2to3 b/databases/py-dbf/files/patch-2to3 new file mode 100644 index 00000000000..f9ad71a6c72 --- /dev/null +++ b/databases/py-dbf/files/patch-2to3 @@ -0,0 +1,13 @@ +--- dbf/_index.py.orig 2022-03-15 19:06:56 UTC ++++ dbf/_index.py +@@ -9,8 +9,8 @@ class IndexFile(object): + filename += '.pdx' + if not os.path.exists(filename): + self.index_file = open(filename, 'r+b') +- self.index_file.write('\xea\xaf\x37\xbf' # signature +- '\x00'*8 # two non-existant lists ++ self.index_file.write('\xea\xaf\x37\xbf', # signature ++ '\x00'*8, # two non-existant lists + '\x00'*500) # and no indices + return + index_file = self.index_file = open(filename, 'r+b') diff --git a/databases/py-geoalchemy2/Makefile b/databases/py-geoalchemy2/Makefile index 420687dfdc1..425d872eaa4 100644 --- a/databases/py-geoalchemy2/Makefile +++ b/databases/py-geoalchemy2/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= geoalchemy2 -PORTVERSION= 0.10.2 +PORTVERSION= 0.11.1 CATEGORIES= databases geography python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/databases/py-geoalchemy2/distinfo b/databases/py-geoalchemy2/distinfo index 1a5af3600f3..7c2940325ce 100644 --- a/databases/py-geoalchemy2/distinfo +++ b/databases/py-geoalchemy2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133727 -SHA256 (GeoAlchemy2-0.10.2.tar.gz) = 
3db833746e11bc802b754751ec94eaab81009a9ad8fe647d461fe76d1a47a3fd -SIZE (GeoAlchemy2-0.10.2.tar.gz) = 135383 +TIMESTAMP = 1647264492 +SHA256 (GeoAlchemy2-0.11.1.tar.gz) = f92a0faddb5b74384dbbf3c7000433358ce8e07a180fe1d6c2843eaa0437ff08 +SIZE (GeoAlchemy2-0.11.1.tar.gz) = 147467 diff --git a/databases/py-marshmallow-sqlalchemy/Makefile b/databases/py-marshmallow-sqlalchemy/Makefile index 556a8c0441a..b9959c61fda 100644 --- a/databases/py-marshmallow-sqlalchemy/Makefile +++ b/databases/py-marshmallow-sqlalchemy/Makefile @@ -1,7 +1,7 @@ # Created by: Mark Felder PORTNAME= marshmallow-sqlalchemy -PORTVERSION= 0.27.0 +PORTVERSION= 0.28.0 CATEGORIES= databases python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -17,7 +17,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}marshmallow>=3.0.0:devel/py-marshmallow@${PY TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0:devel/py-pytest@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pytest-lazy-fixture>=0:devel/py-pytest-lazy-fixture@${PY_FLAVOR} \ -USES= python:3.6+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/databases/py-marshmallow-sqlalchemy/distinfo b/databases/py-marshmallow-sqlalchemy/distinfo index 6f9b4ca9ab0..a677f9f5748 100644 --- a/databases/py-marshmallow-sqlalchemy/distinfo +++ b/databases/py-marshmallow-sqlalchemy/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641045884 -SHA256 (marshmallow-sqlalchemy-0.27.0.tar.gz) = 1521b129564444648c523a38f6446c137f1aae5c9c7de1ec151d5ebf03fd407d -SIZE (marshmallow-sqlalchemy-0.27.0.tar.gz) = 51056 +TIMESTAMP = 1647264494 +SHA256 (marshmallow-sqlalchemy-0.28.0.tar.gz) = fb6b06686f38fec2ea0ec53a5ee4979219409e2b2260f9bc91e4b43105d19782 +SIZE (marshmallow-sqlalchemy-0.28.0.tar.gz) = 51611 diff --git a/databases/py-motor/files/patch-asyncio b/databases/py-motor/files/patch-asyncio new file mode 100644 index 00000000000..fa2ae83652a --- /dev/null +++ b/databases/py-motor/files/patch-asyncio @@ -0,0 +1,16 @@ +The asyncio package has been included in 
the standard library since Python 3.4. + +--- motor/frameworks/asyncio/__init__.py.orig 2016-10-26 16:39:52 UTC ++++ motor/frameworks/asyncio/__init__.py +@@ -25,10 +25,7 @@ import multiprocessing + import sys + from concurrent.futures import ThreadPoolExecutor + +-try: +- from asyncio import ensure_future +-except ImportError: +- from asyncio import async as ensure_future ++from asyncio import ensure_future + + CLASS_PREFIX = 'AsyncIO' + diff --git a/databases/py-mycli/Makefile b/databases/py-mycli/Makefile index c101a27dc1e..21a9e9b723c 100644 --- a/databases/py-mycli/Makefile +++ b/databases/py-mycli/Makefile @@ -2,6 +2,7 @@ PORTNAME= mycli PORTVERSION= 1.24.3 +PORTREVISION= 1 CATEGORIES= databases python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/databases/py-mycli/files/patch-setup.py b/databases/py-mycli/files/patch-setup.py index 69cbfbde055..aeb5be3e4e1 100644 --- a/databases/py-mycli/files/patch-setup.py +++ b/databases/py-mycli/files/patch-setup.py @@ -9,3 +9,12 @@ 'pyaes >= 1.6.1' ] +@@ -92,7 +92,7 @@ setup( + author_email='mycli-dev@googlegroups.com', + version=version, + url='http://mycli.net', +- packages=find_packages(), ++ packages=find_packages(exclude=['test*']), + package_data={'mycli': ['myclirc', 'AUTHORS', 'SPONSORS']}, + description=description, + long_description=description, diff --git a/databases/py-pgspecial/Makefile b/databases/py-pgspecial/Makefile index b6a15c74323..7ee60f0d813 100644 --- a/databases/py-pgspecial/Makefile +++ b/databases/py-pgspecial/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= pgspecial -PORTVERSION= 1.13.0 +PORTVERSION= 1.13.1 CATEGORIES= databases python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/databases/py-pgspecial/distinfo b/databases/py-pgspecial/distinfo index 52c2033c420..e53bf912c54 100644 --- a/databases/py-pgspecial/distinfo +++ b/databases/py-pgspecial/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1621699703 -SHA256 
(pgspecial-1.13.0.tar.gz) = 3847e205b19469f16ded05bda24b4758056d67ade4075a5ded4ce6628a9bad01 -SIZE (pgspecial-1.13.0.tar.gz) = 47855 +TIMESTAMP = 1647264496 +SHA256 (pgspecial-1.13.1.tar.gz) = d5dab96690908275916dc2c622efae217f142e08165fa366949d41cbc5658701 +SIZE (pgspecial-1.13.1.tar.gz) = 49822 diff --git a/databases/py-psycopg-c/Makefile b/databases/py-psycopg-c/Makefile index 5a2f8466064..61981932368 100644 --- a/databases/py-psycopg-c/Makefile +++ b/databases/py-psycopg-c/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= psycopg-c -PORTVERSION= 3.0.9 +PORTVERSION= 3.0.10 CATEGORIES= databases python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/databases/py-psycopg-c/distinfo b/databases/py-psycopg-c/distinfo index a8e2bbf5acb..7bb5ddbc864 100644 --- a/databases/py-psycopg-c/distinfo +++ b/databases/py-psycopg-c/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057922 -SHA256 (psycopg-c-3.0.9.tar.gz) = c9c5dc35afdea978c1331e19b9f7976e0354e66ee559a05a3bccaf893e8ccac5 -SIZE (psycopg-c-3.0.9.tar.gz) = 589102 +TIMESTAMP = 1647264500 +SHA256 (psycopg-c-3.0.10.tar.gz) = 48af17cab8cb033bf57acd41431f85818ee7c7f841c833c17ebb1ee57c856567 +SIZE (psycopg-c-3.0.10.tar.gz) = 589281 diff --git a/databases/py-psycopg/Makefile b/databases/py-psycopg/Makefile index 4770b4cd9a8..093340c3d65 100644 --- a/databases/py-psycopg/Makefile +++ b/databases/py-psycopg/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= psycopg -PORTVERSION= 3.0.9 +PORTVERSION= 3.0.10 CATEGORIES= databases python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/databases/py-psycopg/distinfo b/databases/py-psycopg/distinfo index 108cf2bf0a8..98ac662ebd2 100644 --- a/databases/py-psycopg/distinfo +++ b/databases/py-psycopg/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057920 -SHA256 (psycopg-3.0.9.tar.gz) = 9f72c6bdccfdab405c654ea2e9ed2293a3e9f993f66a37621f796732ce8e1855 -SIZE (psycopg-3.0.9.tar.gz) = 116462 +TIMESTAMP = 1647264498 
+SHA256 (psycopg-3.0.10.tar.gz) = 9c2ba4b3253af8cd1a31ba365cd8bfae7818cc917e409930abc5571ba66c12d8 +SIZE (psycopg-3.0.10.tar.gz) = 116673 diff --git a/databases/py-sqlalchemy14/Makefile b/databases/py-sqlalchemy14/Makefile index 1fda04f4a42..90465a5b404 100644 --- a/databases/py-sqlalchemy14/Makefile +++ b/databases/py-sqlalchemy14/Makefile @@ -1,5 +1,5 @@ PORTNAME= sqlalchemy -PORTVERSION= 1.4.31 +PORTVERSION= 1.4.32 CATEGORIES= databases python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/databases/py-sqlalchemy14/distinfo b/databases/py-sqlalchemy14/distinfo index 3630e4c36d6..1c9dbd156ae 100644 --- a/databases/py-sqlalchemy14/distinfo +++ b/databases/py-sqlalchemy14/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133731 -SHA256 (SQLAlchemy-1.4.31.tar.gz) = 582b59d1e5780a447aada22b461e50b404a9dc05768da1d87368ad8190468418 -SIZE (SQLAlchemy-1.4.31.tar.gz) = 7999646 +TIMESTAMP = 1647264504 +SHA256 (SQLAlchemy-1.4.32.tar.gz) = 6fdd2dc5931daab778c2b65b03df6ae68376e028a3098eb624d0909d999885bc +SIZE (SQLAlchemy-1.4.32.tar.gz) = 8077546 diff --git a/databases/py-sqlalchemy14/pkg-plist b/databases/py-sqlalchemy14/pkg-plist index 8759929e5fd..7c361ac4ef1 100644 --- a/databases/py-sqlalchemy14/pkg-plist +++ b/databases/py-sqlalchemy14/pkg-plist @@ -61,7 +61,6 @@ %%PORTDOCS%%%%DOCSDIR%%/_static/changelog.css %%PORTDOCS%%%%DOCSDIR%%/_static/deepalchemy.png %%PORTDOCS%%%%DOCSDIR%%/_static/deepalchemy_original.png -%%PORTDOCS%%%%DOCSDIR%%/_static/detectmobile.js %%PORTDOCS%%%%DOCSDIR%%/_static/docs.css %%PORTDOCS%%%%DOCSDIR%%/_static/doctools.js %%PORTDOCS%%%%DOCSDIR%%/_static/documentation_options.js @@ -185,6 +184,7 @@ %%PORTDOCS%%%%DOCSDIR%%/build/orm/declarative_config.rst %%PORTDOCS%%%%DOCSDIR%%/build/orm/declarative_mapping.rst %%PORTDOCS%%%%DOCSDIR%%/build/orm/declarative_mixins.rst +%%PORTDOCS%%%%DOCSDIR%%/build/orm/declarative_styles.rst %%PORTDOCS%%%%DOCSDIR%%/build/orm/declarative_tables.rst %%PORTDOCS%%%%DOCSDIR%%/build/orm/events.rst 
%%PORTDOCS%%%%DOCSDIR%%/build/orm/examples.rst @@ -352,6 +352,7 @@ %%PORTDOCS%%%%DOCSDIR%%/orm/declarative_config.html %%PORTDOCS%%%%DOCSDIR%%/orm/declarative_mapping.html %%PORTDOCS%%%%DOCSDIR%%/orm/declarative_mixins.html +%%PORTDOCS%%%%DOCSDIR%%/orm/declarative_styles.html %%PORTDOCS%%%%DOCSDIR%%/orm/declarative_tables.html %%PORTDOCS%%%%DOCSDIR%%/orm/events.html %%PORTDOCS%%%%DOCSDIR%%/orm/examples.html diff --git a/databases/py-sqlobject/Makefile b/databases/py-sqlobject/Makefile index 96770c5846c..5f7f4909141 100644 --- a/databases/py-sqlobject/Makefile +++ b/databases/py-sqlobject/Makefile @@ -17,7 +17,7 @@ BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}formencode>=1.2.2:www/py-formencode@${PY_F RUN_DEPENDS:= ${BUILD_DEPENDS} NO_ARCH= yes -USES= python:3.6+ +USES= dos2unix python:3.6+ USE_PYTHON= autoplist distutils # currently MaxDB(SAP), ADODB are not supported diff --git a/databases/py-sqlobject/files/patch-2to3 b/databases/py-sqlobject/files/patch-2to3 new file mode 100644 index 00000000000..66193d081e4 --- /dev/null +++ b/databases/py-sqlobject/files/patch-2to3 @@ -0,0 +1,2505 @@ +--- sqlobject/col.py.orig 2014-05-04 12:48:24 UTC ++++ sqlobject/col.py +@@ -22,17 +22,17 @@ from array import array + from itertools import count + import re, time + try: +- import cPickle as pickle ++ import pickle as pickle + except ImportError: + import pickle + import weakref + from formencode import compound, validators +-from classregistry import findClass ++from .classregistry import findClass + # Sadly the name "constraints" conflicts with many of the function + # arguments in this module, so we rename it: +-import constraints as constrs +-import sqlbuilder +-from styles import capword ++from . import constraints as constrs ++from . 
import sqlbuilder ++from .styles import capword + + NoDefault = sqlbuilder.NoDefault + +@@ -218,7 +218,7 @@ class SOCol(object): + self.dbEncoding = dbEncoding + + if extra_vars: +- for name, value in extra_vars.items(): ++ for name, value in list(extra_vars.items()): + setattr(self, name, value) + + def _set_validator(self, value): +@@ -286,7 +286,7 @@ class SOCol(object): + + def _sqlType(self): + if self.customSQLType is None: +- raise ValueError, ("Col %s (%s) cannot be used for automatic " ++ raise ValueError("Col %s (%s) cannot be used for automatic " + "schema creation (too abstract)" % + (self.name, self.__class__)) + else: +@@ -399,7 +399,7 @@ class Col(object): + super(Col, self).__init__() + self.__dict__['_name'] = name + self.__dict__['_kw'] = kw +- self.__dict__['creationOrder'] = creationOrder.next() ++ self.__dict__['creationOrder'] = next(creationOrder) + self.__dict__['_extra_vars'] = {} + + def _set_name(self, value): +@@ -473,7 +473,7 @@ class SOStringLikeCol(SOCol): + + def _check_case_sensitive(self, db): + if self.char_binary: +- raise ValueError, "%s does not support binary character columns" % db ++ raise ValueError("%s does not support binary character columns" % db) + + def _mysqlType(self): + type = self._sqlType() +@@ -538,14 +538,14 @@ class StringValidator(SOValidator): + except AttributeError: + binaryType = type(None) # Just a simple workaround + dbEncoding = self.getDbEncoding(state, default='ascii') +- if isinstance(value, unicode): ++ if isinstance(value, str): + return value.encode(dbEncoding) + if self.dataType and isinstance(value, self.dataType): + return value + if isinstance(value, (str, buffer, binaryType, sqlbuilder.SQLExpression)): + return value + if hasattr(value, '__unicode__'): +- return unicode(value).encode(dbEncoding) ++ return str(value).encode(dbEncoding) + raise validators.Invalid("expected a str in the StringCol '%s', got %s %r instead" % \ + (self.name, type(value), value), value, state) + +@@ -563,7 +563,7 
@@ class StringCol(Col): + + class NQuoted(sqlbuilder.SQLExpression): + def __init__(self, value): +- assert isinstance(value, unicode) ++ assert isinstance(value, str) + self.value = value + def __hash__(self): + return hash(self.value) +@@ -576,14 +576,14 @@ class UnicodeStringValidator(SOValidator): + def to_python(self, value, state): + if value is None: + return None +- if isinstance(value, (unicode, sqlbuilder.SQLExpression)): ++ if isinstance(value, (str, sqlbuilder.SQLExpression)): + return value + if isinstance(value, str): +- return unicode(value, self.getDbEncoding(state)) ++ return str(value, self.getDbEncoding(state)) + if isinstance(value, array): # MySQL +- return unicode(value.tostring(), self.getDbEncoding(state)) ++ return str(value.tostring(), self.getDbEncoding(state)) + if hasattr(value, '__unicode__'): +- return unicode(value) ++ return str(value) + raise validators.Invalid("expected a str or a unicode in the UnicodeCol '%s', got %s %r instead" % \ + (self.name, type(value), value), value, state) + +@@ -592,7 +592,7 @@ class UnicodeStringValidator(SOValidator): + return None + if isinstance(value, (str, sqlbuilder.SQLExpression)): + return value +- if isinstance(value, unicode): ++ if isinstance(value, str): + try: + connection = state.connection or state.soObject._connection + except AttributeError: +@@ -602,7 +602,7 @@ class UnicodeStringValidator(SOValidator): + return NQuoted(value) + return value.encode(self.getDbEncoding(state)) + if hasattr(value, '__unicode__'): +- return unicode(value).encode(self.getDbEncoding(state)) ++ return str(value).encode(self.getDbEncoding(state)) + raise validators.Invalid("expected a str or a unicode in the UnicodeCol '%s', got %s %r instead" % \ + (self.name, type(value), value), value, state) + +@@ -625,9 +625,9 @@ class IntValidator(SOValidator): + def to_python(self, value, state): + if value is None: + return None +- if isinstance(value, (int, long, sqlbuilder.SQLExpression)): ++ if isinstance(value, 
(int, sqlbuilder.SQLExpression)): + return value +- for converter, attr_name in (int, '__int__'), (long, '__long__'): ++ for converter, attr_name in (int, '__int__'), (int, '__long__'): + if hasattr(value, attr_name): + try: + return converter(value) +@@ -708,7 +708,7 @@ class BoolValidator(SOValidator): + return None + if isinstance(value, (bool, sqlbuilder.SQLExpression)): + return value +- if isinstance(value, (int, long)) or hasattr(value, '__nonzero__'): ++ if isinstance(value, int) or hasattr(value, '__nonzero__'): + return bool(value) + raise validators.Invalid("expected a bool or an int in the BoolCol '%s', got %s %r instead" % \ + (self.name, type(value), value), value, state) +@@ -753,9 +753,9 @@ class FloatValidator(SOValidator): + def to_python(self, value, state): + if value is None: + return None +- if isinstance(value, (float, int, long, sqlbuilder.SQLExpression)): ++ if isinstance(value, (float, int, sqlbuilder.SQLExpression)): + return value +- for converter, attr_name in (float, '__float__'), (int, '__int__'), (long, '__long__'): ++ for converter, attr_name in (float, '__float__'), (int, '__int__'), (int, '__long__'): + if hasattr(value, attr_name): + try: + return converter(value) +@@ -964,7 +964,7 @@ class EnumValidator(SOValidator): + + def to_python(self, value, state): + if value in self.enumValues: +- if isinstance(value, unicode): ++ if isinstance(value, str): + dbEncoding = self.getDbEncoding(state) + value = value.encode(dbEncoding) + return value +@@ -1000,7 +1000,7 @@ class SOEnumCol(SOCol): + return "ENUM(%s) NOT NULL" % ', '.join([sqlbuilder.sqlrepr(v, 'mysql') for v in self.enumValues]) + + def _postgresType(self): +- length = max(map(self._getlength, self.enumValues)) ++ length = max(list(map(self._getlength, self.enumValues))) + enumValues = ', '.join([sqlbuilder.sqlrepr(v, 'postgres') for v in self.enumValues]) + checkConstraint = "CHECK (%s in (%s))" % (self.dbName, enumValues) + return "VARCHAR(%i) %s" % (length, 
checkConstraint) +@@ -1014,7 +1014,7 @@ class SOEnumCol(SOCol): + return self._postgresType() + + def _firebirdType(self): +- length = max(map(self._getlength, self.enumValues)) ++ length = max(list(map(self._getlength, self.enumValues))) + enumValues = ', '.join([sqlbuilder.sqlrepr(v, 'firebird') for v in self.enumValues]) + checkConstraint = "CHECK (%s in (%s))" % (self.dbName, enumValues) + #NB. Return a tuple, not a string here +@@ -1048,7 +1048,7 @@ class SetValidator(SOValidator): + (self.name, type(value), value), value, state) + + def from_python(self, value, state): +- if isinstance(value, basestring): ++ if isinstance(value, str): + value = (value,) + try: + return ",".join(value) +@@ -1358,7 +1358,7 @@ class DecimalValidator(SOValidator): + def to_python(self, value, state): + if value is None: + return None +- if isinstance(value, (int, long, Decimal, sqlbuilder.SQLExpression)): ++ if isinstance(value, (int, Decimal, sqlbuilder.SQLExpression)): + return value + if isinstance(value, float): + value = str(value) +@@ -1380,7 +1380,7 @@ class DecimalValidator(SOValidator): + return None + if isinstance(value, float): + value = str(value) +- if isinstance(value, basestring): ++ if isinstance(value, str): + try: + connection = state.connection or state.soObject._connection + except AttributeError: +@@ -1393,7 +1393,7 @@ class DecimalValidator(SOValidator): + except: + raise validators.Invalid("can not parse Decimal value '%s' in the DecimalCol from '%s'" % + (value, getattr(state, 'soObject', '(unknown)')), value, state) +- if isinstance(value, (int, long, Decimal, sqlbuilder.SQLExpression)): ++ if isinstance(value, (int, Decimal, sqlbuilder.SQLExpression)): + return value + raise validators.Invalid("expected a Decimal in the DecimalCol '%s', got %s %r instead" % \ + (self.name, type(value), value), value, state) +@@ -1447,7 +1447,7 @@ class DecimalStringValidator(DecimalValidator): + "Value must be less than %s" % int(self.max) + value = 
value.quantize(self.precision) + value = value.to_eng_string() +- elif isinstance(value, (int, long)): ++ elif isinstance(value, int): + value = str(value) + return value + +@@ -1569,7 +1569,7 @@ class PickleValidator(BinaryValidator): + def to_python(self, value, state): + if value is None: + return None +- if isinstance(value, unicode): ++ if isinstance(value, str): + dbEncoding = self.getDbEncoding(state, default='ascii') + value = value.encode(dbEncoding) + if isinstance(value, str): +@@ -1610,7 +1610,7 @@ def pushKey(kw, name, value): + kw[name] = value + + all = [] +-for key, value in globals().items(): ++for key, value in list(globals().items()): + if isinstance(value, type) and (issubclass(value, (Col, SOCol))): + all.append(key) + __all__.extend(all) +--- sqlobject/converters.py.orig 2014-05-04 12:48:24 UTC ++++ sqlobject/converters.py +@@ -95,7 +95,7 @@ def StringLikeConverter(value, db): + return "'%s'" % value + + registerConverter(str, StringLikeConverter) +-registerConverter(unicode, StringLikeConverter) ++registerConverter(str, StringLikeConverter) + registerConverter(array, StringLikeConverter) + registerConverter(buffer, StringLikeConverter) + +@@ -107,7 +107,7 @@ registerConverter(int, IntConverter) + def LongConverter(value, db): + return str(value) + +-registerConverter(long, LongConverter) ++registerConverter(int, LongConverter) + + if NumericType: + registerConverter(NumericType, IntConverter) +@@ -203,8 +203,8 @@ def sqlrepr(obj, db=None): + except AttributeError: + converter = lookupConverter(obj) + if converter is None: +- raise ValueError, "Unknown SQL builtin type: %s for %s" % \ +- (type(obj), repr(obj)) ++ raise ValueError("Unknown SQL builtin type: %s for %s" % \ ++ (type(obj), repr(obj))) + return converter(obj, db) + else: + return reprFunc(db) +--- sqlobject/dbconnection.py.orig 2013-07-07 18:43:26 UTC ++++ sqlobject/dbconnection.py +@@ -6,17 +6,17 @@ import os + import sys + import threading + import types +-import urllib ++import 
urllib.request, urllib.parse, urllib.error + import warnings + import weakref + +-from cache import CacheSet +-import classregistry +-import col +-from converters import sqlrepr +-import main +-import sqlbuilder +-from util.threadinglocal import local as threading_local ++from .cache import CacheSet ++from . import classregistry ++from . import col ++from .converters import sqlrepr ++from . import main ++from . import sqlbuilder ++from .util.threadinglocal import local as threading_local + + warnings.filterwarnings("ignore", "DB-API extension cursor.lastrowid used") + +@@ -34,7 +34,7 @@ class ConsoleWriter: + self.dbEncoding = getattr(connection, "dbEncoding", None) or "ascii" + def write(self, text): + logfile = getattr(sys, self.loglevel) +- if isinstance(text, unicode): ++ if isinstance(text, str): + try: + text = text.encode(self.dbEncoding) + except UnicodeEncodeError: +@@ -111,9 +111,9 @@ class DBConnection: + def uri(self): + auth = getattr(self, 'user', '') or '' + if auth: +- auth = urllib.quote(auth) ++ auth = urllib.parse.quote(auth) + if self.password: +- auth = auth + ':' + urllib.quote(self.password) ++ auth = auth + ':' + urllib.parse.quote(self.password) + auth = auth + '@' + else: + assert not getattr(self, 'password', None), ( +@@ -127,7 +127,7 @@ class DBConnection: + db = self.db + if db.startswith('/'): + db = db[1:] +- return uri + urllib.quote(db) ++ return uri + urllib.parse.quote(db) + + @classmethod + def connectionFromOldURI(cls, uri): +@@ -167,9 +167,9 @@ class DBConnection: + try: + port = int(port) + except ValueError: +- raise ValueError, "port must be integer, got '%s' instead" % port ++ raise ValueError("port must be integer, got '%s' instead" % port) + if not (1 <= port <= 65535): +- raise ValueError, "port must be integer in the range 1-65535, got '%d' instead" % port ++ raise ValueError("port must be integer in the range 1-65535, got '%d' instead" % port) + host = _host + else: + port = None +@@ -183,15 +183,15 @@ class 
DBConnection: + arglist = arglist.split('&') + for single in arglist: + argname, argvalue = single.split('=', 1) +- argvalue = urllib.unquote(argvalue) ++ argvalue = urllib.parse.unquote(argvalue) + args[argname] = argvalue + return user, password, host, port, path, args + + @staticmethod + def _parseURI(uri): +- protocol, request = urllib.splittype(uri) ++ protocol, request = urllib.parse.splittype(uri) + user, password, port = None, None, None +- host, path = urllib.splithost(request) ++ host, path = urllib.parse.splithost(request) + + if host: + # Python < 2.7 have a problem - splituser() calls unquote() too early +@@ -199,17 +199,17 @@ class DBConnection: + if '@' in host: + user, host = host.split('@', 1) + if user: +- user, password = [x and urllib.unquote(x) or None for x in urllib.splitpasswd(user)] +- host, port = urllib.splitport(host) ++ user, password = [x and urllib.parse.unquote(x) or None for x in urllib.parse.splitpasswd(user)] ++ host, port = urllib.parse.splitport(host) + if port: port = int(port) + elif host == '': + host = None + + # hash-tag is splitted but ignored +- path, tag = urllib.splittag(path) +- path, query = urllib.splitquery(path) ++ path, tag = urllib.parse.splittag(path) ++ path, query = urllib.parse.splitquery(path) + +- path = urllib.unquote(path) ++ path = urllib.parse.unquote(path) + if (os.name == 'nt') and (len(path) > 2): + # Preserve backward compatibility with URIs like /C|/path; + # replace '|' by ':' +@@ -282,7 +282,7 @@ class ConnWrapper(object): + "because it takes **kw: %r" + % meth) + takes_conn = 'connection' in args +- meth.im_func.takes_connection = takes_conn ++ meth.__func__.takes_connection = takes_conn + if not takes_conn: + return meth + return ConnMethodWrapper(meth, self._connection) +@@ -363,7 +363,7 @@ class DBAPI(DBConnection): + if self.debug: + self.printDebug(conn, 'auto/exception', 'ROLLBACK') + conn.rollback() +- raise Exception, 'Object used outside of a transaction; implicit COMMIT or ROLLBACK not 
allowed' ++ raise Exception('Object used outside of a transaction; implicit COMMIT or ROLLBACK not allowed') + elif self.autoCommit: + if self.debug: + self.printDebug(conn, 'auto', 'COMMIT') +@@ -593,7 +593,7 @@ class DBAPI(DBConnection): + + def _SO_selectOneAlt(self, so, columnNames, condition): + if columnNames: +- columns = [isinstance(x, basestring) and sqlbuilder.SQLConstant(x) or x for x in columnNames] ++ columns = [isinstance(x, str) and sqlbuilder.SQLConstant(x) or x for x in columnNames] + else: + columns = None + return self.queryOne(self.sqlrepr(sqlbuilder.Select(columns, +@@ -643,7 +643,7 @@ class DBAPI(DBConnection): + data = {} + if 'id' in kw: + data[soClass.sqlmeta.idName] = kw.pop('id') +- for key, col in soClass.sqlmeta.columns.items(): ++ for key, col in list(soClass.sqlmeta.columns.items()): + if key in kw: + value = kw.pop(key) + if col.from_python: +@@ -657,7 +657,7 @@ class DBAPI(DBConnection): + data[col.dbName] = obj + if kw: + # pick the first key from kw to use to raise the error, +- raise TypeError, "got an unexpected keyword argument(s): %r" % kw.keys() ++ raise TypeError("got an unexpected keyword argument(s): %r" % list(kw.keys())) + + if not data: + return None +@@ -665,7 +665,7 @@ class DBAPI(DBConnection): + ['%s %s %s' % + (dbName, ops.get(value, "="), self.sqlrepr(value)) + for dbName, value +- in data.items()]) ++ in list(data.items())]) + + def sqlrepr(self, v): + return sqlrepr(v, self.dbName) +@@ -718,7 +718,7 @@ class Iteration(object): + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + result = self.cursor.fetchone() + if result is None: + self._cleanup() +@@ -791,7 +791,7 @@ class Transaction(object): + if not cls in self._deletedCache: + self._deletedCache[cls] = [] + self._deletedCache[cls].append(inst.id) +- meth = new.instancemethod(self._dbConnection._SO_delete.im_func, self, self.__class__) ++ meth = new.instancemethod(self._dbConnection._SO_delete.__func__, self, self.__class__) + 
return meth(inst) + + def commit(self, close=False): +@@ -801,8 +801,8 @@ class Transaction(object): + if self._dbConnection.debug: + self._dbConnection.printDebug(self._connection, '', 'COMMIT') + self._connection.commit() +- subCaches = [(sub[0], sub[1].allIDs()) for sub in self.cache.allSubCachesByClassNames().items()] +- subCaches.extend([(x[0], x[1]) for x in self._deletedCache.items()]) ++ subCaches = [(sub[0], sub[1].allIDs()) for sub in list(self.cache.allSubCachesByClassNames().items())] ++ subCaches.extend([(x[0], x[1]) for x in list(self._deletedCache.items())]) + for cls, ids in subCaches: + for id in ids: + inst = self._dbConnection.cache.tryGetByName(id, cls) +@@ -836,7 +836,7 @@ class Transaction(object): + self.assertActive() + attr = getattr(self._dbConnection, attr) + try: +- func = attr.im_func ++ func = attr.__func__ + except AttributeError: + if isinstance(attr, ConnWrapper): + return ConnWrapper(attr._soClass, self) +@@ -996,9 +996,9 @@ class ConnectionURIOpener(object): + def connectionForURI(self, uri, oldUri=False, **args): + if args: + if '?' not in uri: +- uri += '?' + urllib.urlencode(args) ++ uri += '?' 
+ urllib.parse.urlencode(args) + else: +- uri += '&' + urllib.urlencode(args) ++ uri += '&' + urllib.parse.urlencode(args) + if uri in self.cachedURIs: + return self.cachedURIs[uri] + if uri.find(':') != -1: +@@ -1020,7 +1020,7 @@ class ConnectionURIOpener(object): + def dbConnectionForScheme(self, scheme): + assert scheme in self.schemeBuilders, ( + "No SQLObject driver exists for %s (only %s)" +- % (scheme, ', '.join(self.schemeBuilders.keys()))) ++ % (scheme, ', '.join(list(self.schemeBuilders.keys())))) + return self.schemeBuilders[scheme]() + + TheURIOpener = ConnectionURIOpener() +@@ -1031,11 +1031,11 @@ connectionForURI = TheURIOpener.connectionForURI + dbConnectionForScheme = TheURIOpener.dbConnectionForScheme + + # Register DB URI schemas +-import firebird +-import maxdb +-import mssql +-import mysql +-import postgres +-import rdbhost +-import sqlite +-import sybase ++from . import firebird ++from . import maxdb ++from . import mssql ++from . import mysql ++from . import postgres ++from . import rdbhost ++from . import sqlite ++from . 
import sybase +--- sqlobject/events.py.orig 2010-11-13 17:42:40 UTC ++++ sqlobject/events.py +@@ -206,25 +206,25 @@ def summarize_events_by_sender(sender=None, output=Non + if sender is None: + send_list = [ + (deref(dispatcher.senders.get(sid)), listeners) +- for sid, listeners in dispatcher.connections.items() ++ for sid, listeners in list(dispatcher.connections.items()) + if deref(dispatcher.senders.get(sid))] + for sender, listeners in sorted_items(send_list): + real_sender = deref(sender) + if not real_sender: + continue + header = 'Sender: %r' % real_sender +- print >> output, (' '*indent) + header +- print >> output, (' '*indent) + '='*len(header) ++ print((' '*indent) + header, file=output) ++ print((' '*indent) + '='*len(header), file=output) + summarize_events_by_sender(real_sender, output=output, indent=indent+2) + else: + for signal, receivers in sorted_items(dispatcher.connections.get(id(sender), [])): + receivers = [deref(r) for r in receivers if deref(r)] + header = 'Signal: %s (%i receivers)' % (sort_name(signal), + len(receivers)) +- print >> output, (' '*indent) + header +- print >> output, (' '*indent) + '-'*len(header) ++ print((' '*indent) + header, file=output) ++ print((' '*indent) + '-'*len(header), file=output) + for receiver in sorted(receivers, key=sort_name): +- print >> output, (' '*indent) + ' ' + nice_repr(receiver) ++ print((' '*indent) + ' ' + nice_repr(receiver), file=output) + + def deref(value): + if isinstance(value, dispatcher.WEAKREF_TYPES): +@@ -234,14 +234,14 @@ def deref(value): + + def sorted_items(a_dict): + if isinstance(a_dict, dict): +- a_dict = a_dict.items() ++ a_dict = list(a_dict.items()) + return sorted(a_dict, key=lambda t: sort_name(t[0])) + + def sort_name(value): + if isinstance(value, type): + return value.__name__ + elif isinstance(value, types.FunctionType): +- return value.func_name ++ return value.__name__ + else: + return str(value) + +@@ -262,26 +262,26 @@ def debug_events(): + + def 
_debug_send(signal=dispatcher.Any, sender=dispatcher.Anonymous, + *arguments, **named): +- print "send %s from %s: %s" % ( +- nice_repr(signal), nice_repr(sender), fmt_args(*arguments, **named)) ++ print("send %s from %s: %s" % ( ++ nice_repr(signal), nice_repr(sender), fmt_args(*arguments, **named))) + return _real_dispatcher_send(signal, sender, *arguments, **named) + + def _debug_sendExact(signal=dispatcher.Any, sender=dispatcher.Anonymous, + *arguments, **named): +- print "sendExact %s from %s: %s" % ( +- nice_repr(signal), nice_repr(sender), fmt_args(*arguments, **name)) ++ print("sendExact %s from %s: %s" % ( ++ nice_repr(signal), nice_repr(sender), fmt_args(*arguments, **name))) + return _real_dispatcher_sendExact(signal, sender, *arguments, **named) + + def _debug_connect(receiver, signal=dispatcher.Any, sender=dispatcher.Any, + weak=True): +- print "connect %s to %s signal %s" % ( +- nice_repr(receiver), nice_repr(signal), nice_repr(sender)) ++ print("connect %s to %s signal %s" % ( ++ nice_repr(receiver), nice_repr(signal), nice_repr(sender))) + return _real_dispatcher_connect(receiver, signal, sender, weak) + + def _debug_disconnect(receiver, signal=dispatcher.Any, sender=dispatcher.Any, + weak=True): +- print "disconnecting %s from %s signal %s" % ( +- nice_repr(receiver), nice_repr(signal), nice_repr(sender)) ++ print("disconnecting %s from %s signal %s" % ( ++ nice_repr(receiver), nice_repr(signal), nice_repr(sender))) + return disconnect(receiver, signal, sender, weak) + + def fmt_args(*arguments, **name): +@@ -294,23 +294,23 @@ def nice_repr(v): + """ + Like repr(), but nicer for debugging here. + """ +- if isinstance(v, (types.ClassType, type)): ++ if isinstance(v, type): + return v.__module__ + '.' 
+ v.__name__ + elif isinstance(v, types.FunctionType): +- if '__name__' in v.func_globals: +- if getattr(sys.modules[v.func_globals['__name__']], +- v.func_name, None) is v: +- return '%s.%s' % (v.func_globals['__name__'], v.func_name) ++ if '__name__' in v.__globals__: ++ if getattr(sys.modules[v.__globals__['__name__']], ++ v.__name__, None) is v: ++ return '%s.%s' % (v.__globals__['__name__'], v.__name__) + return repr(v) + elif isinstance(v, types.MethodType): + return '%s.%s of %s' % ( +- nice_repr(v.im_class), v.im_func.func_name, +- nice_repr(v.im_self)) ++ nice_repr(v.__self__.__class__), v.__func__.__name__, ++ nice_repr(v.__self__)) + else: + return repr(v) + + + __all__ = ['listen', 'send'] +-for name, value in globals().items(): ++for name, value in list(globals().items()): + if isinstance(value, type) and issubclass(value, Signal): + __all__.append(name) +--- sqlobject/include/pydispatch/dispatcher.py.orig 2011-05-15 15:48:27 UTC ++++ sqlobject/include/pydispatch/dispatcher.py +@@ -25,20 +25,14 @@ Internal attributes: + deletion, (considerably speeds up the cleanup process + vs. the original code.) + """ +-from __future__ import generators ++ + import types, weakref +-import saferef, robustapply, errors ++from . import saferef, robustapply, errors + + __author__ = "Patrick K. 
O'Brien " + __cvsid__ = "$Id: dispatcher.py,v 1.9 2005/09/17 04:55:57 mcfletch Exp $" + __version__ = "$Revision: 1.9 $"[11:-2] + +-try: +- True +-except NameError: +- True = 1==1 +- False = 1==0 +- + class _Parameter: + """Used to represent default parameter values.""" + def __repr__(self): +@@ -379,8 +373,8 @@ def _removeReceiver(receiver): + backKey = id(receiver) + for senderkey in sendersBack.get(backKey,()): + try: +- signals = connections[senderkey].keys() +- except KeyError,err: ++ signals = list(connections[senderkey].keys()) ++ except KeyError as err: + pass + else: + for signal in signals: +@@ -391,7 +385,7 @@ def _removeReceiver(receiver): + else: + try: + receivers.remove( receiver ) +- except Exception, err: ++ except Exception as err: + pass + _cleanupConnections(senderkey, signal) + try: +@@ -440,7 +434,7 @@ def _removeBackrefs( senderkey): + except KeyError: + signals = None + else: +- items = signals.items() ++ items = list(signals.items()) + def allReceivers( ): + for signal,set in items: + for item in set: +@@ -468,7 +462,7 @@ def _removeOldBackRefs(senderkey, signal, receiver, re + found = 0 + signals = connections.get(signal) + if signals is not None: +- for sig,recs in connections.get(signal,{}).iteritems(): ++ for sig,recs in connections.get(signal,{}).items(): + if sig != signal: + for rec in recs: + if rec is oldReceiver: +--- sqlobject/include/pydispatch/robust.py.orig 2006-02-09 16:14:04 UTC ++++ sqlobject/include/pydispatch/robust.py +@@ -1,6 +1,6 @@ + """Module implementing error-catching version of send (sendRobust)""" +-from dispatcher import Any, Anonymous, liveReceivers, getAllReceivers +-from robustapply import robustApply ++from .dispatcher import Any, Anonymous, liveReceivers, getAllReceivers ++from .robustapply import robustApply + + def sendRobust( + signal=Any, +@@ -50,7 +50,7 @@ def sendRobust( + *arguments, + **named + ) +- except Exception, err: ++ except Exception as err: + responses.append((receiver, err)) + else: + 
responses.append((receiver, response)) +--- sqlobject/include/pydispatch/saferef.py.orig 2006-02-09 16:14:04 UTC ++++ sqlobject/include/pydispatch/saferef.py +@@ -13,7 +13,7 @@ def safeRef(target, onDelete = None): + weakref or a BoundMethodWeakref) as argument. + """ + if hasattr(target, 'im_self'): +- if target.im_self is not None: ++ if target.__self__ is not None: + # Turn a bound method into a BoundMethodWeakref instance. + # Keep track of these instances for lookup by disconnect(). + assert hasattr(target, 'im_func'), """safeRef target %r has im_self, but no im_func, don't know how to create reference"""%( target,) +@@ -109,26 +109,26 @@ class BoundMethodWeakref(object): + try: + if callable( function ): + function( self ) +- except Exception, e: ++ except Exception as e: + try: + traceback.print_exc() +- except AttributeError, err: +- print '''Exception during saferef %s cleanup function %s: %s'''%( ++ except AttributeError as err: ++ print('''Exception during saferef %s cleanup function %s: %s'''%( + self, function, e +- ) ++ )) + self.deletionMethods = [onDelete] + self.key = self.calculateKey( target ) +- self.weakSelf = weakref.ref(target.im_self, remove) +- self.weakFunc = weakref.ref(target.im_func, remove) +- self.selfName = str(target.im_self) +- self.funcName = str(target.im_func.__name__) ++ self.weakSelf = weakref.ref(target.__self__, remove) ++ self.weakFunc = weakref.ref(target.__func__, remove) ++ self.selfName = str(target.__self__) ++ self.funcName = str(target.__func__.__name__) + def calculateKey( cls, target ): + """Calculate the reference key for this reference + + Currently this is a two-tuple of the id()'s of the + target object and the target function respectively. 
+ """ +- return (id(target.im_self),id(target.im_func)) ++ return (id(target.__self__),id(target.__func__)) + calculateKey = classmethod( calculateKey ) + def __str__(self): + """Give a friendly representation of the object""" +@@ -138,7 +138,7 @@ class BoundMethodWeakref(object): + self.funcName, + ) + __repr__ = __str__ +- def __nonzero__( self ): ++ def __bool__( self ): + """Whether we are still a valid reference""" + return self() is not None + def __cmp__( self, other ): +--- sqlobject/index.py.orig 2011-05-15 15:48:27 UTC ++++ sqlobject/index.py +@@ -1,6 +1,6 @@ + from itertools import count + from types import * +-from converters import sqlrepr ++from .converters import sqlrepr + + creationOrder = count() + +@@ -20,15 +20,15 @@ class SODatabaseIndex(object): + + def get(self, *args, **kw): + if not self.unique: +- raise AttributeError, ( ++ raise AttributeError( + "'%s' object has no attribute 'get' (index is not unique)" % self.name) + connection = kw.pop('connection', None) + if args and kw: +- raise TypeError, "You cannot mix named and unnamed arguments" ++ raise TypeError("You cannot mix named and unnamed arguments") + columns = [d['column'] for d in self.descriptions + if 'column' in d] + if kw and len(kw) != len(columns) or args and len(args) != len(columns): +- raise TypeError, ("get() takes exactly %d argument and an optional " ++ raise TypeError("get() takes exactly %d argument and an optional " + "named argument 'connection' (%d given)" % ( + len(columns), len(args)+len(kw))) + if args: +@@ -65,13 +65,13 @@ class SODatabaseIndex(object): + columnName = columnName.name + colDict = self.soClass.sqlmeta.columns + if columnName not in colDict: +- for possible in colDict.values(): ++ for possible in list(colDict.values()): + if possible.origName == columnName: + column = possible + break + else: + # None found +- raise ValueError, "The column by the name %r was not found in the class %r" % (columnName, self.soClass) ++ raise ValueError("The column by 
the name %r was not found in the class %r" % (columnName, self.soClass)) + else: + column = colDict[columnName] + desc['column'] = column +@@ -153,7 +153,7 @@ class DatabaseIndex(object): + def __init__(self, *columns, **kw): + kw['columns'] = columns + self.kw = kw +- self.creationOrder = creationOrder.next() ++ self.creationOrder = next(creationOrder) + + def setName(self, value): + assert self.kw.get('name') is None, "You cannot change a name after it has already been set (from %s to %s)" % (self.kw['name'], value) +--- sqlobject/inheritance/__init__.py.orig 2011-05-15 15:48:27 UTC ++++ sqlobject/inheritance/__init__.py +@@ -5,7 +5,8 @@ from sqlobject import sqlbuilder + from sqlobject.col import StringCol, ForeignKey + from sqlobject.main import sqlmeta, SQLObject, SelectResults, \ + makeProperties, unmakeProperties, getterName, setterName +-import iteration ++from . import iteration ++from functools import reduce + + def tablesUsedSet(obj, db): + if hasattr(obj, "tablesUsedSet"): +@@ -35,7 +36,7 @@ class InheritableSelectResults(SelectResults): + if inheritedTables: + for tableName in inheritedTables: + tablesSet.add(str(tableName)) +- if orderBy and not isinstance(orderBy, basestring): ++ if orderBy and not isinstance(orderBy, str): + tablesSet.update(tablesUsedSet(orderBy, dbName)) + #DSM: if this class has a parent, we need to link it + #DSM: and be sure the parent is in the table list. 
+@@ -69,7 +70,7 @@ class InheritableSelectResults(SelectResults): + #DSM: Table registry contains only the last children + #DSM: or standalone classes + parentClause = [] +- for (currentClass, minParentClass) in tableRegistry.items(): ++ for (currentClass, minParentClass) in list(tableRegistry.items()): + while (currentClass != minParentClass) \ + and currentClass.sqlmeta.parentClass: + parentClass = currentClass.sqlmeta.parentClass +@@ -86,7 +87,7 @@ class InheritableSelectResults(SelectResults): + return super(InheritableSelectResults, self).accumulateMany(*attributes) + tables = [] + for func_name, attribute in attributes: +- if not isinstance(attribute, basestring): ++ if not isinstance(attribute, str): + tables.append(attribute.tableName) + clone = self.__class__(self.sourceClass, self.clause, + self.clauseTables, inheritedTables=tables, **self.ops) +@@ -130,7 +131,7 @@ class InheritableSQLMeta(sqlmeta): + q = getattr(soClass.q, columnDef.name, None) + else: + q = None +- for c in sqlmeta.childClasses.values(): ++ for c in list(sqlmeta.childClasses.values()): + c.sqlmeta.addColumn(columnDef, connection=connection, childUpdate=True) + if q: setattr(c.q, columnDef.name, q) + +@@ -153,7 +154,7 @@ class InheritableSQLMeta(sqlmeta): + + #DSM: Update each child class if needed + #DSM: and delete properties for this column +- for c in sqlmeta.childClasses.values(): ++ for c in list(sqlmeta.childClasses.values()): + c.sqlmeta.delColumn(column, changeSchema=changeSchema, + connection=connection, childUpdate=True) + +@@ -184,7 +185,7 @@ class InheritableSQLMeta(sqlmeta): + + #DSM: Update each child class if needed and existing (only for new + #DSM: dynamic join as no child classes exists at object creation) +- for c in sqlmeta.childClasses.values(): ++ for c in list(sqlmeta.childClasses.values()): + c.sqlmeta.addJoin(joinDef, childUpdate=True) + + @classmethod +@@ -199,7 +200,7 @@ class InheritableSQLMeta(sqlmeta): + + #DSM: Update each child class if needed + #DSM: and 
delete properties for this join +- for c in sqlmeta.childClasses.values(): ++ for c in list(sqlmeta.childClasses.values()): + c.sqlmeta.delJoin(joinDef, childUpdate=True) + + @classmethod +@@ -236,7 +237,7 @@ class InheritableSQLObject(SQLObject): + # if we are a child class, add sqlbuilder fields from parents + currentClass = cls.sqlmeta.parentClass + while currentClass: +- for column in currentClass.sqlmeta.columnDefinitions.values(): ++ for column in list(currentClass.sqlmeta.columnDefinitions.values()): + if column.name == 'childName': + continue + if isinstance(column, ForeignKey): +@@ -319,7 +320,7 @@ class InheritableSQLObject(SQLObject): + # verify names of added columns + if sqlmeta.parentClass: + # FIXME: this does not check for grandparent column overrides +- parentCols = sqlmeta.parentClass.sqlmeta.columns.keys() ++ parentCols = list(sqlmeta.parentClass.sqlmeta.columns.keys()) + for column in sqlmeta.columnList: + if column.name == 'childName': + raise AttributeError( +@@ -357,7 +358,7 @@ class InheritableSQLObject(SQLObject): + parentClass = self.sqlmeta.parentClass + new_kw = {} + parent_kw = {} +- for (name, value) in kw.items(): ++ for (name, value) in list(kw.items()): + if (name != 'childName') and hasattr(parentClass, name): + parent_kw[name] = value + else: +@@ -370,7 +371,7 @@ class InheritableSQLObject(SQLObject): + for col in self.sqlmeta.columnList: + if (col._default == sqlbuilder.NoDefault) and \ + (col.name not in kw) and (col.foreignName not in kw): +- raise TypeError, "%s() did not get expected keyword argument %s" % (self.__class__.__name__, col.name) ++ raise TypeError("%s() did not get expected keyword argument %s" % (self.__class__.__name__, col.name)) + + parent_kw['childName'] = self.sqlmeta.childName + self._parent = parentClass(kw=parent_kw, +@@ -426,7 +427,7 @@ class InheritableSQLObject(SQLObject): + addClause = parentClass.q.childName == cls.sqlmeta.childName + # if the clause was one of TRUE varians, replace it + if (clause 
is None) or (clause is sqlbuilder.SQLTrueClause) \ +- or (isinstance(clause, basestring) and (clause == 'all')): ++ or (isinstance(clause, str) and (clause == 'all')): + clause = addClause + else: + # patch WHERE condition: +@@ -471,11 +472,11 @@ class InheritableSQLObject(SQLObject): + currentClass = cls + while currentClass: + foreignColumns.update(dict([(column.foreignName, name) +- for (name, column) in currentClass.sqlmeta.columns.items() ++ for (name, column) in list(currentClass.sqlmeta.columns.items()) + if column.foreignKey + ])) + currentClass = currentClass.sqlmeta.parentClass +- for name, value in kw.items(): ++ for name, value in list(kw.items()): + if name in foreignColumns: + name = foreignColumns[name] # translate "key" to "keyID" + if isinstance(value, SQLObject): +@@ -485,7 +486,7 @@ class InheritableSQLObject(SQLObject): + try: + clause.append(getattr(currentClass.q, name) == value) + break +- except AttributeError, err: ++ except AttributeError as err: + pass + currentClass = currentClass.sqlmeta.parentClass + else: +--- sqlobject/main.py.orig 2013-10-14 16:07:00 UTC ++++ sqlobject/main.py +@@ -28,23 +28,23 @@ USA. + + import threading + import weakref +-import sqlbuilder +-import dbconnection +-import col +-import styles ++from . import sqlbuilder ++from . import dbconnection ++from . import col ++from . import styles + import types + import warnings +-import joins +-import index +-import classregistry +-import declarative +-import events +-from sresults import SelectResults +-from util.threadinglocal import local ++from . import joins ++from . import index ++from . import classregistry ++from . import declarative ++from . 
import events ++from .sresults import SelectResults ++from .util.threadinglocal import local + + import sys + if sys.version_info[:3] < (2, 5, 0): +- raise ImportError, "SQLObject requires Python 2.5.0 or later" ++ raise ImportError("SQLObject requires Python 2.5.0 or later") + + """ + This thread-local storage is needed for RowCreatedSignals. It gathers +@@ -81,7 +81,7 @@ def makeProperties(obj): + d = obj.__dict__ + + props = {} +- for var, value in d.items(): ++ for var, value in list(d.items()): + if var.startswith('_set_'): + props.setdefault(var[5:], {})['set'] = value + elif var.startswith('_get_'): +@@ -90,7 +90,7 @@ def makeProperties(obj): + props.setdefault(var[5:], {})['del'] = value + elif var.startswith('_doc_'): + props.setdefault(var[5:], {})['doc'] = value +- for var, setters in props.items(): ++ for var, setters in list(props.items()): + if len(setters) == 1 and 'doc' in setters: + continue + if var in d: +@@ -115,7 +115,7 @@ def unmakeProperties(obj): + delFunc = delattr + d = obj.__dict__ + +- for var, value in d.items(): ++ for var, value in list(d.items()): + if isinstance(value, property): + for prop in [value.fget, value.fset, value.fdel]: + if prop and not prop.__name__ in d: +@@ -148,7 +148,7 @@ def _collectAttributes(cls, new_attrs, look_for_class) + + """ + result = [] +- for attr, value in new_attrs.items(): ++ for attr, value in list(new_attrs.items()): + if isinstance(value, look_for_class): + value.name = attr + delattr(cls, attr) +@@ -162,7 +162,7 @@ class CreateNewSQLObject: + """ + pass + +-class sqlmeta(object): ++class sqlmeta(object, metaclass=declarative.DeclarativeMeta): + + """ + This object is the object we use to keep track of all sorts of +@@ -236,8 +236,6 @@ class sqlmeta(object): + # Default encoding for UnicodeCol's + dbEncoding = None + +- __metaclass__ = declarative.DeclarativeMeta +- + def __classinit__(cls, new_attrs): + for attr in cls._unshared_attributes: + if attr not in new_attrs: +@@ -321,7 +319,7 @@ class 
sqlmeta(object): + parent_columns = [] + for base in soClass.__bases__: + if hasattr(base, "sqlmeta"): +- parent_columns.extend(base.sqlmeta.columns.keys()) ++ parent_columns.extend(list(base.sqlmeta.columns.keys())) + if hasattr(soClass, name): + assert (name in parent_columns) or (name == "childName"), ( + "The class %s.%s already has a variable or method %r, you cannot " +@@ -440,7 +438,7 @@ class sqlmeta(object): + conn = connection or soClass._connection + for columnDef in conn.columnsFromSchema(sqlmeta.table, soClass): + if columnDef.name not in sqlmeta.columnDefinitions: +- if isinstance(columnDef.name, unicode): ++ if isinstance(columnDef.name, str): + columnDef.name = columnDef.name.encode('ascii') + sqlmeta.addColumn(columnDef) + +@@ -456,7 +454,7 @@ class sqlmeta(object): + else: + raise ValueError('Unknown column ' + column) + if isinstance(column, col.Col): +- for c in sqlmeta.columns.values(): ++ for c in list(sqlmeta.columns.values()): + if column is c.columnDef: + column = c + break +@@ -704,10 +702,8 @@ _postponed_local = local() + # here, though -- just automatic method generation (like + # methods and properties for each column) is done in + # MetaSQLObject. +-class SQLObject(object): ++class SQLObject(object, metaclass=declarative.DeclarativeMeta): + +- __metaclass__ = declarative.DeclarativeMeta +- + _connection = sqlhub + + sqlmeta = sqlmeta +@@ -770,12 +766,12 @@ class SQLObject(object): + # _columns where the attribute has been set to None in this + # class. If so, then we need to remove that column from + # _columns. 
+- for key in sqlmeta.columnDefinitions.keys(): ++ for key in list(sqlmeta.columnDefinitions.keys()): + if (key in new_attrs + and new_attrs[key] is None): + del sqlmeta.columnDefinitions[key] + +- for column in sqlmeta.columnDefinitions.values(): ++ for column in list(sqlmeta.columnDefinitions.values()): + sqlmeta.addColumn(column) + + for column in implicitColumns: +@@ -850,7 +846,7 @@ class SQLObject(object): + "(while fixing up sqlmeta %r inheritance)" + % cls.sqlmeta) + values = dict(cls.sqlmeta.__dict__) +- for key in values.keys(): ++ for key in list(values.keys()): + if key.startswith('__') and key.endswith('__'): + # Magic values shouldn't be passed through: + del values[key] +@@ -931,7 +927,7 @@ class SQLObject(object): + dbNames = [col.dbName for col in self.sqlmeta.columnList] + selectResults = self._connection._SO_selectOne(self, dbNames) + if not selectResults: +- raise SQLObjectNotFound, "The object %s by the ID %s does not exist" % (self.__class__.__name__, self.id) ++ raise SQLObjectNotFound("The object %s by the ID %s does not exist" % (self.__class__.__name__, self.id)) + self._SO_selectInit(selectResults) + self._SO_createValues = {} + self.sqlmeta.dirty = False +@@ -958,7 +954,7 @@ class SQLObject(object): + dbNames = [col.dbName for col in self.sqlmeta.columnList] + selectResults = self._connection._SO_selectOne(self, dbNames) + if not selectResults: +- raise SQLObjectNotFound, "The object %s by the ID %s has been deleted" % (self.__class__.__name__, self.id) ++ raise SQLObjectNotFound("The object %s by the ID %s has been deleted" % (self.__class__.__name__, self.id)) + self._SO_selectInit(selectResults) + result = getattr(self, attrName) + return result +@@ -973,7 +969,7 @@ class SQLObject(object): + dbNames = [col.dbName for col in self.sqlmeta.columnList] + selectResults = self._connection._SO_selectOne(self, dbNames) + if not selectResults: +- raise SQLObjectNotFound, "The object %s by the ID %s has been deleted" % 
(self.__class__.__name__, self.id) ++ raise SQLObjectNotFound("The object %s by the ID %s has been deleted" % (self.__class__.__name__, self.id)) + self._SO_selectInit(selectResults) + self.sqlmeta.expired = False + finally: +@@ -986,7 +982,7 @@ class SQLObject(object): + try: + if self.sqlmeta.columns: + values = [(self.sqlmeta.columns[v[0]].dbName, v[1]) +- for v in self._SO_createValues.items()] ++ for v in list(self._SO_createValues.items())] + self._connection._SO_update(self, values) + self.sqlmeta.dirty = False + self._SO_createValues = {} +@@ -1067,13 +1063,13 @@ class SQLObject(object): + is_column = lambda _c: _c in self.sqlmeta._plainSetters + f_is_column = lambda item: is_column(item[0]) + f_not_column = lambda item: not is_column(item[0]) +- items = kw.items() +- extra = dict(filter(f_not_column, items)) +- kw = dict(filter(f_is_column, items)) ++ items = list(kw.items()) ++ extra = dict(list(filter(f_not_column, items))) ++ kw = dict(list(filter(f_is_column, items))) + + # _creating is special, see _SO_setValue + if self.sqlmeta._creating or self.sqlmeta.lazyUpdate: +- for name, value in kw.items(): ++ for name, value in list(kw.items()): + from_python = getattr(self, '_SO_from_python_%s' % name, None) + if from_python: + kw[name] = dbValue = from_python(value, self._SO_validatorState) +@@ -1086,16 +1082,16 @@ class SQLObject(object): + + self._SO_createValues.update(kw) + +- for name, value in extra.items(): ++ for name, value in list(extra.items()): + try: + getattr(self.__class__, name) + except AttributeError: + if name not in self.sqlmeta.columns: +- raise TypeError, "%s.set() got an unexpected keyword argument %s" % (self.__class__.__name__, name) ++ raise TypeError("%s.set() got an unexpected keyword argument %s" % (self.__class__.__name__, name)) + try: + setattr(self, name, value) +- except AttributeError, e: +- raise AttributeError, '%s (with attribute %r)' % (e, name) ++ except AttributeError as e: ++ raise AttributeError('%s (with 
attribute %r)' % (e, name)) + + self.sqlmeta.dirty = True + return +@@ -1112,7 +1108,7 @@ class SQLObject(object): + # read the user's mind. We'll combine everything + # else into a single UPDATE, if necessary. + toUpdate = {} +- for name, value in kw.items(): ++ for name, value in list(kw.items()): + from_python = getattr(self, '_SO_from_python_%s' % name, None) + if from_python: + dbValue = from_python(value, self._SO_validatorState) +@@ -1124,20 +1120,20 @@ class SQLObject(object): + if self.sqlmeta.cacheValues: + setattr(self, instanceName(name), value) + toUpdate[name] = dbValue +- for name, value in extra.items(): ++ for name, value in list(extra.items()): + try: + getattr(self.__class__, name) + except AttributeError: + if name not in self.sqlmeta.columns: +- raise TypeError, "%s.set() got an unexpected keyword argument %s" % (self.__class__.__name__, name) ++ raise TypeError("%s.set() got an unexpected keyword argument %s" % (self.__class__.__name__, name)) + try: + setattr(self, name, value) +- except AttributeError, e: +- raise AttributeError, '%s (with attribute %r)' % (e, name) ++ except AttributeError as e: ++ raise AttributeError('%s (with attribute %r)' % (e, name)) + + if toUpdate: + args = [(self.sqlmeta.columns[name].dbName, value) +- for name, value in toUpdate.items()] ++ for name, value in list(toUpdate.items())] + self._connection._SO_update(self, args) + finally: + self._SO_writeLock.release() +@@ -1257,7 +1253,7 @@ class SQLObject(object): + # If we specified an SQL DEFAULT, then we should use that + if default is NoDefault: + if column.defaultSQL is None: +- raise TypeError, "%s() did not get expected keyword argument '%s'" % (self.__class__.__name__, column.name) ++ raise TypeError("%s() did not get expected keyword argument '%s'" % (self.__class__.__name__, column.name)) + else: + # There is defaultSQL for the column - do not put + # the column to kw so that the backend creates the value +@@ -1277,7 +1273,7 @@ class SQLObject(object): + # 
Here's where an INSERT is finalized. + # These are all the column values that were supposed + # to be set, but were delayed until now: +- setters = self._SO_createValues.items() ++ setters = list(self._SO_createValues.items()) + # Here's their database names: + names = [self.sqlmeta.columns[v[0]].dbName for v in setters] + values = [v[1] for v in setters] +@@ -1316,7 +1312,7 @@ class SQLObject(object): + name = (name,) + value = (value,) + if len(name) != len(value): +- raise ValueError, "'column' and 'value' tuples must be of the same size" ++ raise ValueError("'column' and 'value' tuples must be of the same size") + new_value = [] + for n, v in zip(name, value): + from_python = getattr(cls, '_SO_from_python_' + n) +@@ -1335,13 +1331,13 @@ class SQLObject(object): + result, obj = cls._findAlternateID(name, dbName, value, connection) + if not result: + if idxName is None: +- raise SQLObjectNotFound, "The %s by alternateID %s = %s does not exist" % (cls.__name__, name, repr(value)) ++ raise SQLObjectNotFound("The %s by alternateID %s = %s does not exist" % (cls.__name__, name, repr(value))) + else: + names = [] +- for i in xrange(len(name)): ++ for i in range(len(name)): + names.append("%s = %s" % (name[i], repr(value[i]))) + names = ', '.join(names) +- raise SQLObjectNotFound, "The %s by unique index %s(%s) does not exist" % (cls.__name__, idxName, names) ++ raise SQLObjectNotFound("The %s by unique index %s(%s) does not exist" % (cls.__name__, idxName, names)) + if obj: + return obj + if connection: +@@ -1564,7 +1560,7 @@ class SQLObject(object): + if results.count(): + # Restrictions only apply if there are + # matching records on the related table +- raise SQLObjectIntegrityError, ( ++ raise SQLObjectIntegrityError( + "Tried to delete %s::%s but " + "table %s has a restriction against it" % + (klass.__name__, self.id, k.__name__)) +@@ -1638,7 +1634,7 @@ class SQLObject(object): + + @classmethod + def setConnection(cls, value): +- if isinstance(value, 
basestring): ++ if isinstance(value, str): + value = dbconnection.connectionForURI(value) + cls._connection = value + +@@ -1720,7 +1716,7 @@ def getID(obj, refColumn=None): + return getattr(obj, refColumn or 'id') + elif isinstance(obj, int): + return obj +- elif isinstance(obj, long): ++ elif isinstance(obj, int): + return int(obj) + elif isinstance(obj, str): + try: +@@ -1733,7 +1729,7 @@ def getID(obj, refColumn=None): + def getObject(obj, klass): + if isinstance(obj, int): + return klass(obj) +- elif isinstance(obj, long): ++ elif isinstance(obj, int): + return klass(int(obj)) + elif isinstance(obj, str): + return klass(int(obj)) +--- sqlobject/manager/command.py.orig 2011-05-15 15:48:27 UTC ++++ sqlobject/manager/command.py +@@ -69,10 +69,10 @@ def db_differences(soClass, conn): + del existing[col.dbName] + else: + missing[col.dbName] = col +- for col in existing.values(): ++ for col in list(existing.values()): + diffs.append('Database has extra column: %s' + % col.dbName) +- for col in missing.values(): ++ for col in list(missing.values()): + diffs.append('Database missing column: %s' % col.dbName) + return diffs + +@@ -96,7 +96,7 @@ class CommandRunner(object): + self.invalid('No COMMAND given (try "%s help")' + % os.path.basename(invoked_as)) + real_command = self.command_aliases.get(command, command) +- if real_command not in self.commands.keys(): ++ if real_command not in list(self.commands.keys()): + self.invalid('COMMAND %s unknown' % command) + runner = self.commands[real_command]( + invoked_as, command, args, self) +@@ -109,7 +109,7 @@ class CommandRunner(object): + self.command_aliases[alias] = name + + def invalid(self, msg, code=2): +- print msg ++ print(msg) + sys.exit(code) + + the_runner = CommandRunner() +@@ -170,10 +170,8 @@ def standard_parser(connection=True, simulate=True, + default=[]) + return parser + +-class Command(object): ++class Command(object, metaclass=DeclarativeMeta): + +- __metaclass__ = DeclarativeMeta +- + min_args = 0 + 
min_args_error = 'You must provide at least %(min_args)s arguments' + max_args = 0 +@@ -225,7 +223,7 @@ class Command(object): + # Check for circular references + if cls in dependency_stack: + dependency_stack.append(cls) +- raise SQLObjectCircularReferenceError, ( ++ raise SQLObjectCircularReferenceError( + "Found a circular reference: %s " % + (' --> '.join([x.__name__ + for x in dependency_stack]))) +@@ -248,14 +246,14 @@ class Command(object): + sorter.append((level, cls)) + sorter.sort() + ordered_classes = [cls for level, cls in sorter] +- except SQLObjectCircularReferenceError, msg: ++ except SQLObjectCircularReferenceError as msg: + # Failsafe: return the classes as-is if a circular reference + # prevented the dependency levels to be calculated. +- print ("Warning: a circular reference was detected in the " ++ print(("Warning: a circular reference was detected in the " + "model. Unable to sort the classes by dependency: they " + "will be treated in alphabetic order. This may or may " + "not work depending on your database backend. " +- "The error was:\n%s" % msg) ++ "The error was:\n%s" % msg)) + return classes + return ordered_classes + +@@ -347,21 +345,21 @@ class Command(object): + % '\n * '.join([soClass.__name__ + for soClass in missing])) + if require_some and not all: +- print 'No classes found!' 
++ print('No classes found!') + if self.options.modules: +- print 'Looked in modules: %s' % ', '.join(self.options.modules) ++ print('Looked in modules: %s' % ', '.join(self.options.modules)) + else: +- print 'No modules specified' ++ print('No modules specified') + if self.options.packages: +- print 'Looked in packages: %s' % ', '.join(self.options.packages) ++ print('Looked in packages: %s' % ', '.join(self.options.packages)) + else: +- print 'No packages specified' ++ print('No packages specified') + if self.options.class_matchers: +- print 'Matching class pattern: %s' % self.options.class_matches ++ print('Matching class pattern: %s' % self.options.class_matches) + if self.options.eggs: +- print 'Looked in eggs: %s' % ', '.join(self.options.eggs) ++ print('Looked in eggs: %s' % ', '.join(self.options.eggs)) + else: +- print 'No eggs specified' ++ print('No eggs specified') + sys.exit(1) + return self.orderClassesByDependencyLevel(all) + +@@ -411,7 +409,7 @@ class Command(object): + if '#' in conf_fn: + conf_fn, conf_section = conf_fn.split('#', 1) + +- from ConfigParser import ConfigParser ++ from configparser import ConfigParser + p = ConfigParser() + # Case-sensitive: + p.optionxform = str +@@ -454,21 +452,20 @@ class Command(object): + def find_classes_in_file(arg, dir_name, filenames): + if dir_name.startswith('.svn'): + return +- filenames = filter(lambda fname: fname.endswith('.py') and fname != '__init__.py', +- filenames) ++ filenames = [fname for fname in filenames if fname.endswith('.py') and fname != '__init__.py'] + for fname in filenames: + module_name = os.path.join(dir_name, fname) + module_name = module_name[module_name.find(package_name):] + module_name = module_name.replace(os.path.sep,'.')[:-3] + try: + module = moduleloader.load_module(module_name) +- except ImportError, err: ++ except ImportError as err: + if self.options.verbose: +- print 'Could not import module "%s". 
Error was : "%s"' % (module_name, err) ++ print('Could not import module "%s". Error was : "%s"' % (module_name, err)) + continue +- except Exception, exc: ++ except Exception as exc: + if self.options.verbose: +- print 'Unknown exception while processing module "%s" : "%s"' % (module_name, exc) ++ print('Unknown exception while processing module "%s" : "%s"' % (module_name, exc)) + continue + classes = self.classes_from_module(module) + all.extend(classes) +@@ -484,7 +481,7 @@ class Command(object): + if not mod: + continue + if self.options.verbose: +- print 'Looking in module %s' % mod ++ print('Looking in module %s' % mod) + modules.extend(self.classes_from_module( + moduleloader.load_module(mod))) + return modules +@@ -503,7 +500,7 @@ class Command(object): + dist = pkg_resources.get_distribution(egg_spec) + if not dist.has_metadata('sqlobject.txt'): + if warn_no_sqlobject: +- print 'No sqlobject.txt in %s egg info' % egg_spec ++ print('No sqlobject.txt in %s egg info' % egg_spec) + return None, {} + result = {} + for line in dist.get_metadata_lines('sqlobject.txt'): +@@ -513,7 +510,7 @@ class Command(object): + name, value = line.split('=', 1) + name = name.strip().lower() + if name in result: +- print 'Warning: %s appears more than once in sqlobject.txt' % name ++ print('Warning: %s appears more than once in sqlobject.txt' % name) + result[name.strip().lower()] = value.strip() + return dist, result + +@@ -532,12 +529,12 @@ class Command(object): + else: + prompt += ' [y/N]? 
' + while 1: +- response = raw_input(prompt).strip() ++ response = input(prompt).strip() + if not response.strip(): + return default + if response and response[0].lower() in ('y', 'n'): + return response[0].lower() == 'y' +- print 'Y or N please' ++ print('Y or N please') + + def shorten_filename(self, fn): + """ +@@ -558,7 +555,7 @@ class Command(object): + f = open(fn, 'w') + f.write(pretext) + f.close() +- print '$EDITOR %s' % fn ++ print('$EDITOR %s' % fn) + os.system('$EDITOR %s' % fn) + f = open(fn, 'r') + content = f.read() +@@ -582,16 +579,16 @@ class CommandSQL(Command): + allConstraints = [] + for cls in classes: + if self.options.verbose >= 1: +- print '-- %s from %s' % ( +- cls.__name__, cls.__module__) ++ print('-- %s from %s' % ( ++ cls.__name__, cls.__module__)) + createSql, constraints = cls.createTableSQL() +- print createSql.strip() + ';\n' ++ print(createSql.strip() + ';\n') + allConstraints.append(constraints) + for constraints in allConstraints: + if constraints: + for constraint in constraints: + if constraint: +- print constraint.strip() + ';\n' ++ print(constraint.strip() + ';\n') + + + class CommandList(Command): +@@ -603,12 +600,12 @@ class CommandList(Command): + + def command(self): + if self.options.verbose >= 1: +- print 'Classes found:' ++ print('Classes found:') + classes = self.classes(require_connection=False) + for soClass in classes: +- print '%s.%s' % (soClass.__module__, soClass.__name__) ++ print('%s.%s' % (soClass.__module__, soClass.__name__)) + if self.options.verbose >= 1: +- print ' Table: %s' % soClass.sqlmeta.table ++ print(' Table: %s' % soClass.sqlmeta.table) + + class CommandCreate(Command): + +@@ -633,26 +630,26 @@ class CommandCreate(Command): + if not self.options.simulate: + try: + soClass._connection.createEmptyDatabase() +- except soClass._connection.module.ProgrammingError, e: ++ except soClass._connection.module.ProgrammingError as e: + if str(e).find('already exists') != -1: +- print 'Database already 
exists' ++ print('Database already exists') + else: + raise + else: +- print '(simulating; cannot create database)' ++ print('(simulating; cannot create database)') + dbs_created.append(soClass._connection) +- if soClass._connection not in constraints.keys(): ++ if soClass._connection not in list(constraints.keys()): + constraints[soClass._connection] = [] + exists = soClass._connection.tableExists(soClass.sqlmeta.table) + if v >= 1: + if exists: + existing += 1 +- print '%s already exists.' % soClass.__name__ ++ print('%s already exists.' % soClass.__name__) + else: +- print 'Creating %s' % soClass.__name__ ++ print('Creating %s' % soClass.__name__) + if v >= 2: + sql, extra = soClass.createTableSQL() +- print sql ++ print(sql) + if (not self.options.simulate + and not exists): + if self.options.interactive: +@@ -662,22 +659,22 @@ class CommandCreate(Command): + if tableConstraints: + constraints[soClass._connection].append(tableConstraints) + else: +- print 'Cancelled' ++ print('Cancelled') + else: + created += 1 + tableConstraints = soClass.createTable(applyConstraints=False) + if tableConstraints: + constraints[soClass._connection].append(tableConstraints) +- for connection in constraints.keys(): ++ for connection in list(constraints.keys()): + if v >= 2: +- print 'Creating constraints' ++ print('Creating constraints') + for constraintList in constraints[connection]: + for constraint in constraintList: + if constraint: + connection.query(constraint) + if v >= 1: +- print '%i tables created (%i already exist)' % ( +- created, existing) ++ print('%i tables created (%i already exist)' % ( ++ created, existing)) + + + class CommandDrop(Command): +@@ -695,10 +692,10 @@ class CommandDrop(Command): + exists = soClass._connection.tableExists(soClass.sqlmeta.table) + if v >= 1: + if exists: +- print 'Dropping %s' % soClass.__name__ ++ print('Dropping %s' % soClass.__name__) + else: + not_existing += 1 +- print '%s does not exist.' 
% soClass.__name__ ++ print('%s does not exist.' % soClass.__name__) + if (not self.options.simulate + and exists): + if self.options.interactive: +@@ -706,13 +703,13 @@ class CommandDrop(Command): + dropped += 1 + soClass.dropTable() + else: +- print 'Cancelled' ++ print('Cancelled') + else: + dropped += 1 + soClass.dropTable() + if v >= 1: +- print '%i tables dropped (%i didn\'t exist)' % ( +- dropped, not_existing) ++ print('%i tables dropped (%i didn\'t exist)' % ( ++ dropped, not_existing)) + + class CommandStatus(Command): + +@@ -730,7 +727,7 @@ class CommandStatus(Command): + if self.printed: + return + self.printed = True +- print 'Checking %s...' % soClass.__name__ ++ print('Checking %s...' % soClass.__name__) + + def command(self): + good = 0 +@@ -744,7 +741,7 @@ class CommandStatus(Command): + self.print_class(soClass) + if not conn.tableExists(soClass.sqlmeta.table): + self.print_class(soClass) +- print ' Does not exist in database' ++ print(' Does not exist in database') + missing_tables += 1 + continue + try: +@@ -752,13 +749,13 @@ class CommandStatus(Command): + soClass) + except AttributeError: + if not columnsFromSchema_warning: +- print 'Database does not support reading columns' ++ print('Database does not support reading columns') + columnsFromSchema_warning = True + good += 1 + continue +- except AssertionError, e: +- print 'Cannot read db table %s: %s' % ( +- soClass.sqlmeta.table, e) ++ except AssertionError as e: ++ print('Cannot read db table %s: %s' % ( ++ soClass.sqlmeta.table, e)) + continue + existing = {} + for col in columns: +@@ -772,19 +769,19 @@ class CommandStatus(Command): + missing[col.dbName] = col + if existing: + self.print_class(soClass) +- for col in existing.values(): +- print ' Database has extra column: %s' % col.dbName ++ for col in list(existing.values()): ++ print(' Database has extra column: %s' % col.dbName) + if missing: + self.print_class(soClass) +- for col in missing.values(): +- print ' Database missing column: 
%s' % col.dbName ++ for col in list(missing.values()): ++ print(' Database missing column: %s' % col.dbName) + if existing or missing: + bad += 1 + else: + good += 1 + if self.options.verbose: +- print '%i in sync; %i out of sync; %i not in database' % ( +- good, bad, missing_tables) ++ print('%i in sync; %i out of sync; %i not in database' % ( ++ good, bad, missing_tables)) + + class CommandHelp(Command): + +@@ -799,20 +796,20 @@ class CommandHelp(Command): + if self.args: + the_runner.run([self.invoked_as, self.args[0], '-h']) + else: +- print 'Available commands:' +- print ' (use "%s help COMMAND" or "%s COMMAND -h" ' % ( +- self.prog_name, self.prog_name) +- print ' for more information)' +- items = the_runner.commands.items() ++ print('Available commands:') ++ print(' (use "%s help COMMAND" or "%s COMMAND -h" ' % ( ++ self.prog_name, self.prog_name)) ++ print(' for more information)') ++ items = list(the_runner.commands.items()) + items.sort() + max_len = max([len(cn) for cn, c in items]) + for command_name, command in items: +- print '%s:%s %s' % (command_name, ++ print('%s:%s %s' % (command_name, + ' '*(max_len-len(command_name)), +- command.summary) ++ command.summary)) + if command.aliases: +- print '%s (Aliases: %s)' % ( +- ' '*max_len, ', '.join(command.aliases)) ++ print('%s (Aliases: %s)' % ( ++ ' '*max_len, ', '.join(command.aliases))) + + class CommandExecute(Command): + +@@ -834,7 +831,7 @@ class CommandExecute(Command): + args = self.args + if self.options.use_stdin: + if self.options.verbose: +- print "Reading additional SQL from stdin (Ctrl-D or Ctrl-Z to finish)..." 
++ print("Reading additional SQL from stdin (Ctrl-D or Ctrl-Z to finish)...") + args.append(sys.stdin.read()) + self.conn = self.connection().getConnection() + self.cursor = self.conn.cursor() +@@ -843,22 +840,22 @@ class CommandExecute(Command): + + def execute_sql(self, sql): + if self.options.verbose: +- print sql ++ print(sql) + try: + self.cursor.execute(sql) +- except Exception, e: ++ except Exception as e: + if not self.options.verbose: +- print sql +- print "****Error:" +- print ' ', e ++ print(sql) ++ print("****Error:") ++ print(' ', e) + return + desc = self.cursor.description + rows = self.cursor.fetchall() + if self.options.verbose: + if not self.cursor.rowcount: +- print "No rows accessed" ++ print("No rows accessed") + else: +- print "%i rows accessed" % self.cursor.rowcount ++ print("%i rows accessed" % self.cursor.rowcount) + if desc: + for name, type_code, display_size, internal_size, precision, scale, null_ok in desc: + sys.stdout.write("%s\t" % name) +@@ -867,7 +864,7 @@ class CommandExecute(Command): + for col in row: + sys.stdout.write("%r\t" % col) + sys.stdout.write("\n") +- print ++ print() + + class CommandRecord(Command): + +@@ -928,12 +925,12 @@ class CommandRecord(Command): + sim = self.options.simulate + classes = self.classes() + if not classes: +- print "No classes found!" 
++ print("No classes found!") + return + + output_dir = self.find_output_dir() + version = os.path.basename(output_dir) +- print "Creating version %s" % version ++ print("Creating version %s" % version) + conns = [] + files = {} + for cls in self.classes(): +@@ -963,14 +960,14 @@ class CommandRecord(Command): + last_version_dir = self.find_last_version() + if last_version_dir and not self.options.force_create: + if v > 1: +- print "Checking %s to see if it is current" % last_version_dir ++ print("Checking %s to see if it is current" % last_version_dir) + files_copy = files.copy() + for fn in os.listdir(last_version_dir): + if not fn.endswith('.sql'): + continue + if not fn in files_copy: + if v > 1: +- print "Missing file %s" % fn ++ print("Missing file %s" % fn) + break + f = open(os.path.join(last_version_dir, fn), 'r') + content = f.read() +@@ -978,32 +975,32 @@ class CommandRecord(Command): + if (self.strip_comments(files_copy[fn]) + != self.strip_comments(content)): + if v > 1: +- print "Content does not match: %s" % fn ++ print("Content does not match: %s" % fn) + break + del files_copy[fn] + else: + # No differences so far + if not files_copy: + # Used up all files +- print ("Current status matches version %s" +- % os.path.basename(last_version_dir)) ++ print(("Current status matches version %s" ++ % os.path.basename(last_version_dir))) + return + if v > 1: +- print "Extra files: %s" % ', '.join(files_copy.keys()) ++ print("Extra files: %s" % ', '.join(list(files_copy.keys()))) + if v: +- print ("Current state does not match %s" +- % os.path.basename(last_version_dir)) ++ print(("Current state does not match %s" ++ % os.path.basename(last_version_dir))) + if v > 1 and not last_version_dir: +- print "No last version to check" ++ print("No last version to check") + if not sim: + os.mkdir(output_dir) + if v: +- print 'Making directory %s' % self.shorten_filename(output_dir) +- files = files.items() ++ print('Making directory %s' % 
self.shorten_filename(output_dir)) ++ files = list(files.items()) + files.sort() + for fn, content in files: + if v: +- print ' Writing %s' % self.shorten_filename(fn) ++ print(' Writing %s' % self.shorten_filename(fn)) + if not sim: + f = open(os.path.join(output_dir, fn), 'w') + f.write(content) +@@ -1021,8 +1018,8 @@ class CommandRecord(Command): + diff = ' %s: %s' % (cls.sqlmeta.table, diff) + all_diffs.append(diff) + if all_diffs: +- print 'Database does not match schema:' +- print '\n'.join(all_diffs) ++ print('Database does not match schema:') ++ print('\n'.join(all_diffs)) + for conn in conns: + self.update_db(version, conn) + else: +@@ -1044,17 +1041,17 @@ class CommandRecord(Command): + f = open(fn, 'w') + f.write(text) + f.close() +- print 'Wrote to %s' % fn ++ print('Wrote to %s' % fn) + + def update_db(self, version, conn): + v = self.options.verbose + if not conn.tableExists(SQLObjectVersionTable.sqlmeta.table): + if v: +- print ('Creating table %s' +- % SQLObjectVersionTable.sqlmeta.table) ++ print(('Creating table %s' ++ % SQLObjectVersionTable.sqlmeta.table)) + sql = SQLObjectVersionTable.createTableSQL(connection=conn) + if v > 1: +- print sql ++ print(sql) + if not self.options.simulate: + SQLObjectVersionTable.createTable(connection=conn) + if not self.options.simulate: +@@ -1073,7 +1070,7 @@ class CommandRecord(Command): + if base is None: + base = CONFIG.get('sqlobject_history_dir', '.') + if not os.path.exists(base): +- print 'Creating history directory %s' % self.shorten_filename(base) ++ print('Creating history directory %s' % self.shorten_filename(base)) + if not self.options.simulate: + os.makedirs(base) + return base +@@ -1084,8 +1081,8 @@ class CommandRecord(Command): + dir = os.path.join(self.base_dir(), today + '-' + + self.options.version_name) + if os.path.exists(dir): +- print ("Error, directory already exists: %s" +- % dir) ++ print(("Error, directory already exists: %s" ++ % dir)) + sys.exit(1) + return dir + extra = '' +@@ 
-1114,18 +1111,18 @@ class CommandRecord(Command): + sim = self.options.simulate + version = self.options.force_db_version + if not self.version_regex.search(version): +- print "Versions must be in the format YYYY-MM-DD..." +- print "You version %s does not fit this" % version ++ print("Versions must be in the format YYYY-MM-DD...") ++ print("You version %s does not fit this" % version) + return + version_dir = os.path.join(self.base_dir(), version) + if not os.path.exists(version_dir): + if v: +- print 'Creating %s' % self.shorten_filename(version_dir) ++ print('Creating %s' % self.shorten_filename(version_dir)) + if not sim: + os.mkdir(version_dir) + elif v: +- print ('Directory %s exists' +- % self.shorten_filename(version_dir)) ++ print(('Directory %s exists' ++ % self.shorten_filename(version_dir))) + if self.options.db_record: + self.update_db(version, self.connection()) + +@@ -1162,51 +1159,51 @@ class CommandUpgrade(CommandRecord): + else: + fname = self.find_last_version() + if fname is None: +- print "No version exists, use 'record' command to create one" ++ print("No version exists, use 'record' command to create one") + return + version_to = os.path.basename(fname) + current = self.current_version() + if v: +- print 'Current version: %s' % current ++ print('Current version: %s' % current) + version_list = self.make_plan(current, version_to) + if not version_list: +- print 'Database up to date' ++ print('Database up to date') + return + if v: +- print 'Plan:' ++ print('Plan:') + for next_version, upgrader in version_list: +- print ' Use %s to upgrade to %s' % ( +- self.shorten_filename(upgrader), next_version) ++ print(' Use %s to upgrade to %s' % ( ++ self.shorten_filename(upgrader), next_version)) + conn = self.connection() + for next_version, upgrader in version_list: + f = open(upgrader) + sql = f.read() + f.close() + if v: +- print "Running:" +- print sql +- print '-'*60 ++ print("Running:") ++ print(sql) ++ print('-'*60) + if not sim: + try: + 
conn.query(sql) + except: +- print "Error in script: %s" % upgrader ++ print("Error in script: %s" % upgrader) + raise + self.update_db(next_version, conn) +- print 'Done.' ++ print('Done.') + + + def current_version(self): + conn = self.connection() + if not conn.tableExists(SQLObjectVersionTable.sqlmeta.table): +- print 'No sqlobject_version table!' ++ print('No sqlobject_version table!') + sys.exit(1) + versions = list(SQLObjectVersionTable.select(connection=conn)) + if not versions: +- print 'No rows in sqlobject_version!' ++ print('No rows in sqlobject_version!') + sys.exit(1) + if len(versions) > 1: +- print 'Ambiguous sqlobject_version_table' ++ print('Ambiguous sqlobject_version_table') + sys.exit(1) + return versions[0].version + +@@ -1216,9 +1213,9 @@ class CommandUpgrade(CommandRecord): + dbname = self.connection().dbName + next_version, upgrader = self.best_upgrade(current, dest, dbname) + if not upgrader: +- print 'No way to upgrade from %s to %s' % (current, dest) +- print ('(you need a %s/upgrade_%s_%s.sql script)' +- % (current, dbname, dest)) ++ print('No way to upgrade from %s to %s' % (current, dest)) ++ print(('(you need a %s/upgrade_%s_%s.sql script)' ++ % (current, dbname, dest))) + sys.exit(1) + plan = [(next_version, upgrader)] + if next_version == dest: +@@ -1229,42 +1226,42 @@ class CommandUpgrade(CommandRecord): + def best_upgrade(self, current, dest, target_dbname): + current_dir = os.path.join(self.base_dir(), current) + if self.options.verbose > 1: +- print ('Looking in %s for upgraders' +- % self.shorten_filename(current_dir)) ++ print(('Looking in %s for upgraders' ++ % self.shorten_filename(current_dir))) + upgraders = [] + for fn in os.listdir(current_dir): + match = self.upgrade_regex.search(fn) + if not match: + if self.options.verbose > 1: +- print 'Not an upgrade script: %s' % fn ++ print('Not an upgrade script: %s' % fn) + continue + dbname = match.group(1) + version = match.group(2) + if dbname != target_dbname: + if 
self.options.verbose > 1: +- print 'Not for this database: %s (want %s)' % ( +- dbname, target_dbname) ++ print('Not for this database: %s (want %s)' % ( ++ dbname, target_dbname)) + continue + if version > dest: + if self.options.verbose > 1: +- print 'Version too new: %s (only want %s)' % ( +- version, dest) ++ print('Version too new: %s (only want %s)' % ( ++ version, dest)) + upgraders.append((version, os.path.join(current_dir, fn))) + if not upgraders: + if self.options.verbose > 1: +- print 'No upgraders found in %s' % current_dir ++ print('No upgraders found in %s' % current_dir) + return None, None + upgraders.sort() + return upgraders[-1] + + def update_sys_path(paths, verbose): +- if isinstance(paths, basestring): ++ if isinstance(paths, str): + paths = [paths] + for path in paths: + path = os.path.abspath(path) + if path not in sys.path: + if verbose > 1: +- print 'Adding %s to path' % path ++ print('Adding %s to path' % path) + sys.path.insert(0, path) + + if __name__ == '__main__': +--- sqlobject/maxdb/maxdbconnection.py.orig 2022-03-15 19:15:16 UTC ++++ sqlobject/maxdb/maxdbconnection.py +@@ -246,7 +246,7 @@ class MaxdbConnection(DBAPI): + pkmap[col_name]=True + + if len(pkmap) == 0: +- raise PrimaryKeyNotFounded, tableName ++ raise PrimaryKeyNotFounded(tableName) + + for (field, nullAllowed, default, data_type, data_len, + data_scale) in colData: +--- sqlobject/mysql/mysqlconnection.py.orig 2011-05-08 15:49:57 UTC ++++ sqlobject/mysql/mysqlconnection.py +@@ -69,7 +69,7 @@ class MySQLConnection(DBAPI): + db=self.db, user=self.user, passwd=self.password, **self.kw) + if self.module.version_info[:3] >= (1, 2, 2): + conn.ping(True) # Attempt to reconnect. This setting is persistent. 
+- except self.module.OperationalError, e: ++ except self.module.OperationalError as e: + conninfo = "; used connection string: host=%(host)s, port=%(port)s, db=%(db)s, user=%(user)s" % self.__dict__ + raise OperationalError(ErrorMessage(e, conninfo)) + +@@ -90,9 +90,9 @@ class MySQLConnection(DBAPI): + conn.autocommit(auto) + + def _executeRetry(self, conn, cursor, query): +- if self.need_unicode and not isinstance(query, unicode): ++ if self.need_unicode and not isinstance(query, str): + try: +- query = unicode(query, self.dbEncoding) ++ query = str(query, self.dbEncoding) + except UnicodeError: + pass + +@@ -111,7 +111,7 @@ class MySQLConnection(DBAPI): + for count in range(3): + try: + return cursor.execute(query) +- except self.module.OperationalError, e: ++ except self.module.OperationalError as e: + if e.args[0] in (self.module.constants.CR.SERVER_GONE_ERROR, self.module.constants.CR.SERVER_LOST): + if count == 2: + raise OperationalError(ErrorMessage(e)) +@@ -119,27 +119,27 @@ class MySQLConnection(DBAPI): + self.printDebug(conn, str(e), 'ERROR') + else: + raise OperationalError(ErrorMessage(e)) +- except self.module.IntegrityError, e: ++ except self.module.IntegrityError as e: + msg = ErrorMessage(e) + if e.args[0] == self.module.constants.ER.DUP_ENTRY: + raise DuplicateEntryError(msg) + else: + raise IntegrityError(msg) +- except self.module.InternalError, e: ++ except self.module.InternalError as e: + raise InternalError(ErrorMessage(e)) +- except self.module.ProgrammingError, e: ++ except self.module.ProgrammingError as e: + raise ProgrammingError(ErrorMessage(e)) +- except self.module.DataError, e: ++ except self.module.DataError as e: + raise DataError(ErrorMessage(e)) +- except self.module.NotSupportedError, e: ++ except self.module.NotSupportedError as e: + raise NotSupportedError(ErrorMessage(e)) +- except self.module.DatabaseError, e: ++ except self.module.DatabaseError as e: + raise DatabaseError(ErrorMessage(e)) +- except 
self.module.InterfaceError, e: ++ except self.module.InterfaceError as e: + raise InterfaceError(ErrorMessage(e)) +- except self.module.Warning, e: ++ except self.module.Warning as e: + raise Warning(ErrorMessage(e)) +- except self.module.Error, e: ++ except self.module.Error as e: + raise Error(ErrorMessage(e)) + + def _queryInsertID(self, conn, soInstance, id, names, values): +@@ -194,7 +194,7 @@ class MySQLConnection(DBAPI): + # which is not always True (for an embedded application, e.g.) + self.query('DESCRIBE %s' % (tableName)) + return True +- except ProgrammingError, e: ++ except ProgrammingError as e: + if e[0].code == 1146: # ER_NO_SUCH_TABLE + return False + raise +--- sqlobject/postgres/pgconnection.py.orig 2013-09-30 14:25:11 UTC ++++ sqlobject/postgres/pgconnection.py +@@ -142,7 +142,7 @@ class PostgresConnection(DBAPI): + conn = self.module.connect(self.dsn) + else: + conn = self.module.connect(**self.dsn_dict) +- except self.module.OperationalError, e: ++ except self.module.OperationalError as e: + raise OperationalError(ErrorMessage(e, "used connection string %r" % self.dsn)) + + # For printDebug in _executeRetry +@@ -162,29 +162,29 @@ class PostgresConnection(DBAPI): + self.printDebug(conn, query, 'QueryR') + try: + return cursor.execute(query) +- except self.module.OperationalError, e: ++ except self.module.OperationalError as e: + raise OperationalError(ErrorMessage(e)) +- except self.module.IntegrityError, e: ++ except self.module.IntegrityError as e: + msg = ErrorMessage(e) + if e.pgcode == '23505': + raise DuplicateEntryError(msg) + else: + raise IntegrityError(msg) +- except self.module.InternalError, e: ++ except self.module.InternalError as e: + raise InternalError(ErrorMessage(e)) +- except self.module.ProgrammingError, e: ++ except self.module.ProgrammingError as e: + raise ProgrammingError(ErrorMessage(e)) +- except self.module.DataError, e: ++ except self.module.DataError as e: + raise DataError(ErrorMessage(e)) +- except 
self.module.NotSupportedError, e: ++ except self.module.NotSupportedError as e: + raise NotSupportedError(ErrorMessage(e)) +- except self.module.DatabaseError, e: ++ except self.module.DatabaseError as e: + raise DatabaseError(ErrorMessage(e)) +- except self.module.InterfaceError, e: ++ except self.module.InterfaceError as e: + raise InterfaceError(ErrorMessage(e)) +- except self.module.Warning, e: ++ except self.module.Warning as e: + raise Warning(ErrorMessage(e)) +- except self.module.Error, e: ++ except self.module.Error as e: + raise Error(ErrorMessage(e)) + + def _queryInsertID(self, conn, soInstance, id, names, values): +--- sqlobject/sqlbuilder.py.orig 2013-10-05 12:02:45 UTC ++++ sqlobject/sqlbuilder.py +@@ -67,8 +67,8 @@ import threading + import types + import weakref + +-import classregistry +-from converters import registerConverter, sqlrepr, quote_str, unquote_str ++from . import classregistry ++from .converters import registerConverter, sqlrepr, quote_str, unquote_str + + + class VersionError(Exception): +@@ -86,7 +86,7 @@ class SQLObjectState(object): + safeSQLRE = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_\.]*$') + def sqlIdentifier(obj): + # some db drivers return unicode column names +- return isinstance(obj, basestring) and bool(safeSQLRE.search(obj.strip())) ++ return isinstance(obj, str) and bool(safeSQLRE.search(obj.strip())) + + + def execute(expr, executor): +@@ -97,7 +97,7 @@ def execute(expr, executor): + + + def _str_or_sqlrepr(expr, db): +- if isinstance(expr, basestring): ++ if isinstance(expr, str): + return expr + return sqlrepr(expr, db) + +@@ -181,9 +181,9 @@ class SQLExpression: + return repr(self) + + def __cmp__(self, other): +- raise VersionError, "Python 2.1+ required" ++ raise VersionError("Python 2.1+ required") + def __rcmp__(self, other): +- raise VersionError, "Python 2.1+ required" ++ raise VersionError("Python 2.1+ required") + + def startswith(self, s): + return STARTSWITH(self, s) +@@ -287,7 +287,7 @@ class 
SQLCall(SQLExpression): + def components(self): + return [self.expr] + list(self.args) + def execute(self, executor): +- raise ValueError, "I don't yet know how to locally execute functions" ++ raise ValueError("I don't yet know how to locally execute functions") + + registerConverter(SQLCall, SQLExprConverter) + +@@ -316,7 +316,7 @@ class SQLConstant(SQLExpression): + def __sqlrepr__(self, db): + return self.const + def execute(self, executor): +- raise ValueError, "I don't yet know how to execute SQL constants" ++ raise ValueError("I don't yet know how to execute SQL constants") + + registerConverter(SQLConstant, SQLExprConverter) + +@@ -391,7 +391,7 @@ class Table(SQLExpression): + def __sqlrepr__(self, db): + return _str_or_sqlrepr(self.tableName, db) + def execute(self, executor): +- raise ValueError, "Tables don't have values" ++ raise ValueError("Tables don't have values") + + class SQLObjectTable(Table): + FieldClass = SQLObjectField +@@ -411,7 +411,7 @@ class SQLObjectTable(Table): + elif attr in self.soClass.sqlmeta.columns: + column = self.soClass.sqlmeta.columns[attr] + return self._getattrFromColumn(column, attr) +- elif attr+'ID' in [k for (k, v) in self.soClass.sqlmeta.columns.items() if v.foreignKey]: ++ elif attr+'ID' in [k for (k, v) in list(self.soClass.sqlmeta.columns.items()) if v.foreignKey]: + attr += 'ID' + column = self.soClass.sqlmeta.columns[attr] + return self._getattrFromColumn(column, attr) +@@ -427,7 +427,7 @@ class SQLObjectTable(Table): + class SQLObjectTableWithJoins(SQLObjectTable): + + def __getattr__(self, attr): +- if attr+'ID' in [k for (k, v) in self.soClass.sqlmeta.columns.items() if v.foreignKey]: ++ if attr+'ID' in [k for (k, v) in list(self.soClass.sqlmeta.columns.items()) if v.foreignKey]: + column = self.soClass.sqlmeta.columns[attr+'ID'] + return self._getattrFromForeignKey(column, attr) + elif attr in [x.joinMethodName for x in self.soClass.sqlmeta.joins]: +@@ -616,7 +616,7 @@ class Select(SQLExpression): + # None 
doesn't filter anything, it's just a no-op: + return self + clause = self.ops['clause'] +- if isinstance(clause, basestring): ++ if isinstance(clause, str): + clause = SQLConstant('(%s)' % clause) + return self.newClause(AND(clause, filter_clause)) + +@@ -697,7 +697,7 @@ class Select(SQLExpression): + if self.ops['limit'] is not NoDefault: + end = start + self.ops['limit'] + if start or end: +- from dbconnection import dbConnectionForScheme ++ from .dbconnection import dbConnectionForScheme + select = dbConnectionForScheme(db)._queryAddLimitOffset(select, start, end) + if self.ops['forUpdate']: + select += " FOR UPDATE" +@@ -711,7 +711,7 @@ class Insert(SQLExpression): + self.table = table + if valueList: + if values: +- raise TypeError, "You may only give valueList *or* values" ++ raise TypeError("You may only give valueList *or* values") + self.valueList = valueList + else: + self.valueList = [values] +@@ -722,7 +722,7 @@ class Insert(SQLExpression): + allowNonDict = True + template = self.template + if (template is NoDefault) and isinstance(self.valueList[0], dict): +- template = self.valueList[0].keys() ++ template = list(self.valueList[0].keys()) + allowNonDict = False + if template is not NoDefault: + insert += " (%s)" % ", ".join(template) +@@ -732,10 +732,10 @@ class Insert(SQLExpression): + for value in self.valueList: + if isinstance(value, dict): + if template is NoDefault: +- raise TypeError, "You can't mix non-dictionaries with dictionaries in an INSERT if you don't provide a template (%s)" % repr(value) ++ raise TypeError("You can't mix non-dictionaries with dictionaries in an INSERT if you don't provide a template (%s)" % repr(value)) + value = dictToList(template, value) + elif not allowNonDict: +- raise TypeError, "You can't mix non-dictionaries with dictionaries in an INSERT if you don't provide a template (%s)" % repr(value) ++ raise TypeError("You can't mix non-dictionaries with dictionaries in an INSERT if you don't provide a template (%s)" % 
repr(value)) + listToJoin_app("(%s)" % ", ".join([sqlrepr(v, db) for v in value])) + insert = "%s%s" % (insert, ", ".join(listToJoin)) + return insert +@@ -746,8 +746,8 @@ def dictToList(template, dict): + list = [] + for key in template: + list.append(dict[key]) +- if len(dict.keys()) > len(template): +- raise TypeError, "Extra entries in dictionary that aren't asked for in template (template=%s, dict=%s)" % (repr(template), repr(dict)) ++ if len(list(dict.keys())) > len(template): ++ raise TypeError("Extra entries in dictionary that aren't asked for in template (template=%s, dict=%s)" % (repr(template), repr(dict))) + return list + + class Update(SQLExpression): +@@ -768,7 +768,7 @@ class Update(SQLExpression): + update += "," + update += " %s=%s" % (self.template[i], sqlrepr(self.values[i], db)) + else: +- for key, value in self.values.items(): ++ for key, value in list(self.values.items()): + if first: + first = False + else: +@@ -788,7 +788,7 @@ class Delete(SQLExpression): + def __init__(self, table, where=NoDefault): + self.table = table + if where is NoDefault: +- raise TypeError, "You must give a where clause or pass in None to indicate no where clause" ++ raise TypeError("You must give a where clause or pass in None to indicate no where clause") + self.whereClause = where + def __sqlrepr__(self, db): + whereClause = self.whereClause +@@ -846,7 +846,7 @@ def _IN(item, list): + return SQLOp("IN", item, list) + + def IN(item, list): +- from sresults import SelectResults # Import here to avoid circular import ++ from .sresults import SelectResults # Import here to avoid circular import + if isinstance(list, SelectResults): + query = list.queryForSelect() + query.ops['items'] = [list.sourceClass.q.id] +@@ -880,7 +880,7 @@ def ISNOTNULL(expr): + class ColumnAS(SQLOp): + ''' Just like SQLOp('AS', expr, name) except without the parentheses ''' + def __init__(self, expr, name): +- if isinstance(name, basestring): ++ if isinstance(name, str): + name = 
SQLConstant(name) + SQLOp.__init__(self, 'AS', expr, name) + def __sqlrepr__(self, db): +@@ -919,11 +919,11 @@ class _LikeQuoted: + return "CONCAT(%s)" % ", ".join(values) + else: + return " || ".join(values) +- elif isinstance(s, basestring): ++ elif isinstance(s, str): + s = _quote_like_special(unquote_str(sqlrepr(s, db)), db) + return quote_str("%s%s%s" % (self.prefix, s, self.postfix), db) + else: +- raise TypeError, "expected str, unicode or SQLExpression, got %s" % type(s) ++ raise TypeError("expected str, unicode or SQLExpression, got %s" % type(s)) + + def _quote_like_special(s, db): + if db in ('postgres', 'rdbhost'): +@@ -1021,9 +1021,9 @@ class SQLJoinConditional(SQLJoin): + (Table1.q.col1, Table2.q.col2) + """ + if not on_condition and not using_columns: +- raise TypeError, "You must give ON condition or USING columns" ++ raise TypeError("You must give ON condition or USING columns") + if on_condition and using_columns: +- raise TypeError, "You must give ON condition or USING columns but not both" ++ raise TypeError("You must give ON condition or USING columns but not both") + SQLJoin.__init__(self, table1, table2, op) + self.on_condition = on_condition + self.using_columns = using_columns +@@ -1254,7 +1254,7 @@ class ImportProxy(SQLExpression): + self.soClass = None + classregistry.registry(registry).addClassCallback(clsName, lambda foreign, me: setattr(me, 'soClass', foreign), self) + +- def __nonzero__(self): ++ def __bool__(self): + return True + + def __getattr__(self, attr): +--- sqlobject/sqlite/sqliteconnection.py.orig 2014-04-12 20:42:31 UTC ++++ sqlobject/sqlite/sqliteconnection.py +@@ -1,7 +1,7 @@ + import base64 + import os +-import thread +-import urllib ++import _thread ++import urllib.request, urllib.parse, urllib.error + from sqlobject.dbconnection import DBAPI, Boolean + from sqlobject import col, sqlbuilder + from sqlobject.dberrors import * +@@ -121,7 +121,7 @@ class SQLiteConnection(DBAPI): + path = "//" + path + else: + path = "///" 
+ path +- path = urllib.quote(path) ++ path = urllib.parse.quote(path) + return 'sqlite:%s' % path + + def getConnection(self): +@@ -133,7 +133,7 @@ class SQLiteConnection(DBAPI): + self._connectionNumbers[id(conn)] = self._connectionCount + self._connectionCount += 1 + return conn +- threadid = thread.get_ident() ++ threadid = _thread.get_ident() + if (self._pool is not None + and threadid in self._threadPool): + conn = self._threadPool[threadid] +@@ -206,30 +206,30 @@ class SQLiteConnection(DBAPI): + self.printDebug(conn, query, 'QueryR') + try: + return cursor.execute(query) +- except self.module.OperationalError, e: ++ except self.module.OperationalError as e: + raise OperationalError(ErrorMessage(e)) +- except self.module.IntegrityError, e: ++ except self.module.IntegrityError as e: + msg = ErrorMessage(e) + if msg.startswith('column') and msg.endswith('not unique') \ + or msg.startswith('UNIQUE constraint failed:'): + raise DuplicateEntryError(msg) + else: + raise IntegrityError(msg) +- except self.module.InternalError, e: ++ except self.module.InternalError as e: + raise InternalError(ErrorMessage(e)) +- except self.module.ProgrammingError, e: ++ except self.module.ProgrammingError as e: + raise ProgrammingError(ErrorMessage(e)) +- except self.module.DataError, e: ++ except self.module.DataError as e: + raise DataError(ErrorMessage(e)) +- except self.module.NotSupportedError, e: ++ except self.module.NotSupportedError as e: + raise NotSupportedError(ErrorMessage(e)) +- except self.module.DatabaseError, e: ++ except self.module.DatabaseError as e: + raise DatabaseError(ErrorMessage(e)) +- except self.module.InterfaceError, e: ++ except self.module.InterfaceError as e: + raise InterfaceError(ErrorMessage(e)) +- except self.module.Warning, e: ++ except self.module.Warning as e: + raise Warning(ErrorMessage(e)) +- except self.module.Error, e: ++ except self.module.Error as e: + raise Error(ErrorMessage(e)) + + def _queryInsertID(self, conn, soInstance, id, 
names, values): +--- sqlobject/util/moduleloader.py.orig 2011-05-15 15:48:27 UTC ++++ sqlobject/util/moduleloader.py +@@ -15,7 +15,7 @@ def load_module_from_name(filename, module_name): + if not os.path.exists(init_filename): + try: + f = open(init_filename, 'w') +- except (OSError, IOError), e: ++ except (OSError, IOError) as e: + raise IOError( + 'Cannot write __init__.py file into directory %s (%s)\n' + % (os.path.dirname(filename), e)) diff --git a/databases/py-tiledb/Makefile b/databases/py-tiledb/Makefile index 17a722bcebb..cc451f30460 100644 --- a/databases/py-tiledb/Makefile +++ b/databases/py-tiledb/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= tiledb -PORTVERSION= 0.12.1 +PORTVERSION= 0.12.4 CATEGORIES= databases python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -16,10 +16,10 @@ NOT_FOR_ARCHS= i386 NOT_FOR_ARCHS_REASON= is currently 64-bit only BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}cython>=0.27:lang/cython@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}numpy>=1.16.5,1:math/py-numpy@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}numpy>=1.19.2,1:math/py-numpy@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pybind11>=2.6.2:devel/py-pybind11@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}setuptools_scm>=1.5.4:devel/py-setuptools_scm@${PY_FLAVOR} \ - tiledb>=2.2.8:databases/tiledb + tiledb>=2.6.4:databases/tiledb LIB_DEPENDS= libtiledb.so:databases/tiledb RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}numpy>=1.19.2,1:math/py-numpy@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}packaging>=0:devel/py-packaging@${PY_FLAVOR} diff --git a/databases/py-tiledb/distinfo b/databases/py-tiledb/distinfo index 265a6446402..6a03d1d9c4b 100644 --- a/databases/py-tiledb/distinfo +++ b/databases/py-tiledb/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971034 -SHA256 (tiledb-0.12.1.tar.gz) = 396938027d709f04506cf519ffa3979c48823c8677f67f10794698e4c5a756bb -SIZE (tiledb-0.12.1.tar.gz) = 246015 +TIMESTAMP = 1647264506 +SHA256 (tiledb-0.12.4.tar.gz) = 
e8c10f4e7c9bc2865005f6c7a66adbd2377704de84ef4b97b8eac4f1929052ea +SIZE (tiledb-0.12.4.tar.gz) = 249230 diff --git a/databases/py-zodbpickle/files/patch-2to3 b/databases/py-zodbpickle/files/patch-2to3 new file mode 100644 index 00000000000..5dd11e8c55d --- /dev/null +++ b/databases/py-zodbpickle/files/patch-2to3 @@ -0,0 +1,1029 @@ +--- src/zodbpickle/pickle_2.py.orig 2021-09-29 06:13:32 UTC ++++ src/zodbpickle/pickle_2.py +@@ -27,8 +27,8 @@ Misc variables: + __version__ = "$Revision: 72223 $" # Code version + + from types import * +-from copy_reg import dispatch_table +-from copy_reg import _extension_registry, _inverted_registry, _extension_cache ++from copyreg import dispatch_table ++from copyreg import _extension_registry, _inverted_registry, _extension_cache + import marshal + import sys + import struct +@@ -513,22 +513,22 @@ class Pickler: + if StringType is UnicodeType: + # This is true for Jython + def save_string(self, obj, pack=struct.pack): +- unicode = obj.isunicode() ++ str = obj.isunicode() + + if self.bin: +- if unicode: ++ if str: + obj = obj.encode("utf-8") + l = len(obj) +- if l < 256 and not unicode: ++ if l < 256 and not str: + self.write(SHORT_BINSTRING + chr(l) + obj) + else: + s = pack(" 0 + ashex = hex(x) + njunkchars = 2 + ashex.endswith('L') +@@ -1499,19 +1499,19 @@ def decode_long(data): + + nbytes = len(data) + if nbytes == 0: +- return 0L ++ return 0 + ashex = _binascii.hexlify(data[::-1]) +- n = long(ashex, 16) # quadratic time before Python 2.3; linear now ++ n = int(ashex, 16) # quadratic time before Python 2.3; linear now + if data[-1] >= '\x80': +- n -= 1L << (nbytes * 8) ++ n -= 1 << (nbytes * 8) + return n + + # Shorthands + + try: +- from cStringIO import StringIO ++ from io import StringIO + except ImportError: +- from StringIO import StringIO ++ from io import StringIO + + def dump(obj, file, protocol=None): + Pickler(file, protocol).dump(obj) +--- src/zodbpickle/pickle_3.py.orig 2021-09-29 06:13:32 UTC ++++ 
src/zodbpickle/pickle_3.py +@@ -601,7 +601,7 @@ class _Pickler: + return + + items = iter(items) +- r = range(self._BATCHSIZE) ++ r = list(range(self._BATCHSIZE)) + while items is not None: + tmp = [] + for i in r: +@@ -631,7 +631,7 @@ class _Pickler: + write(MARK + DICT) + + self.memoize(obj) +- self._batch_setitems(obj.items()) ++ self._batch_setitems(list(obj.items())) + + dispatch[dict] = save_dict + if PyStringMap is not None: +@@ -650,7 +650,7 @@ class _Pickler: + return + + items = iter(items) +- r = range(self._BATCHSIZE) ++ r = list(range(self._BATCHSIZE)) + while items is not None: + tmp = [] + for i in r: +@@ -1182,8 +1182,8 @@ class _Unpickler: + try: + value = func(*args) + except: +- print(sys.exc_info()) +- print(func, args) ++ print((sys.exc_info())) ++ print((func, args)) + raise + stack[-1] = value + dispatch[REDUCE[0]] = load_reduce +@@ -1290,13 +1290,13 @@ class _Unpickler: + if state: + inst_dict = inst.__dict__ + intern = sys.intern +- for k, v in state.items(): ++ for k, v in list(state.items()): + if type(k) is str: +- inst_dict[intern(k)] = v ++ inst_dict[sys.intern(k)] = v + else: + inst_dict[k] = v + if slotstate: +- for k, v in slotstate.items(): ++ for k, v in list(slotstate.items()): + setattr(inst, k, v) + dispatch[BUILD[0]] = load_build + +--- src/zodbpickle/pickletools_2.py.orig 2021-09-29 06:13:32 UTC ++++ src/zodbpickle/pickletools_2.py +@@ -429,7 +429,7 @@ def read_unicodestringnl(f): + raise ValueError("no newline found when trying to read " + "unicodestringnl") + data = data[:-1] # lose the newline +- return unicode(data, 'raw-unicode-escape') ++ return str(data, 'raw-unicode-escape') + + unicodestringnl = ArgumentDescriptor( + name='unicodestringnl', +@@ -465,7 +465,7 @@ def read_unicodestring4(f): + raise ValueError("unicodestring4 byte count < 0: %d" % n) + data = f.read(n) + if len(data) == n: +- return unicode(data, 'utf-8') ++ return str(data, 'utf-8') + raise ValueError("expected %d bytes in a unicodestring4, but only %d 
" + "remain" % (n, len(data))) + +@@ -509,7 +509,7 @@ def read_decimalnl_short(f): + try: + return int(s) + except OverflowError: +- return long(s) ++ return int(s) + + def read_decimalnl_long(f): + r""" +@@ -532,7 +532,7 @@ def read_decimalnl_long(f): + s = read_stringnl(f, decode=False, stripquotes=False) + if not s.endswith("L"): + raise ValueError("trailing 'L' required in %r" % s) +- return long(s) ++ return int(s) + + + decimalnl_short = ArgumentDescriptor( +@@ -731,12 +731,12 @@ pyint = StackObject( + + pylong = StackObject( + name='long', +- obtype=long, ++ obtype=int, + doc="A long (as opposed to short) Python integer object.") + + pyinteger_or_bool = StackObject( + name='int_or_bool', +- obtype=(int, long, bool), ++ obtype=(int, int, bool), + doc="A Python integer object (short or long), or " + "a Python bool.") + +@@ -757,7 +757,7 @@ pystring = StackObject( + + pyunicode = StackObject( + name='unicode', +- obtype=unicode, ++ obtype=str, + doc="A Python Unicode string object.") + + pynone = StackObject( +@@ -1800,18 +1800,18 @@ def assure_pickle_consistency(verbose=False): + for name in pickle.__all__: + if not re.match("[A-Z][A-Z0-9_]+$", name): + if verbose: +- print "skipping %r: it doesn't look like an opcode name" % name ++ print("skipping %r: it doesn't look like an opcode name" % name) + continue + picklecode = getattr(pickle, name) + if not isinstance(picklecode, str) or len(picklecode) != 1: + if verbose: +- print ("skipping %r: value %r doesn't look like a pickle " +- "code" % (name, picklecode)) ++ print(("skipping %r: value %r doesn't look like a pickle " ++ "code" % (name, picklecode))) + continue + if picklecode in copy: + if verbose: +- print "checking name %r w/ code %r for consistency" % ( +- name, picklecode) ++ print("checking name %r w/ code %r for consistency" % ( ++ name, picklecode)) + d = copy[picklecode] + if d.name != name: + raise ValueError("for pickle code %r, pickle.py uses name %r " +@@ -1827,7 +1827,7 @@ def 
assure_pickle_consistency(verbose=False): + (name, picklecode)) + if copy: + msg = ["we appear to have pickle opcodes that pickle.py doesn't have:"] +- for code, d in copy.items(): ++ for code, d in list(copy.items()): + msg.append(" name %r with code %r" % (d.name, code)) + raise ValueError("\n".join(msg)) + +@@ -1861,7 +1861,7 @@ def genops(pickle): + to query its current position) pos is None. + """ + +- import cStringIO as StringIO ++ import io as StringIO + + if isinstance(pickle, str): + pickle = StringIO.StringIO(pickle) +@@ -1969,7 +1969,7 @@ def dis(pickle, out=None, memo=None, indentlevel=4): + errormsg = None + for opcode, arg, pos in genops(pickle): + if pos is not None: +- print >> out, "%5d:" % pos, ++ print("%5d:" % pos, end=' ', file=out) + + line = "%-4s %s%s" % (repr(opcode.code)[1:-1], + indentchunk * len(markstack), +@@ -2034,7 +2034,7 @@ def dis(pickle, out=None, memo=None, indentlevel=4): + line += ' ' + repr(arg) + if markmsg: + line += ' ' + markmsg +- print >> out, line ++ print(line, file=out) + + if errormsg: + # Note that we delayed complaining until the offending opcode +@@ -2053,7 +2053,7 @@ def dis(pickle, out=None, memo=None, indentlevel=4): + + stack.extend(after) + +- print >> out, "highest protocol among opcodes =", maxproto ++ print("highest protocol among opcodes =", maxproto, file=out) + if stack: + raise ValueError("stack not empty after STOP: %r" % stack) + +--- src/zodbpickle/tests/pickletester_2.py.orig 2021-09-29 06:13:32 UTC ++++ src/zodbpickle/tests/pickletester_2.py +@@ -1,8 +1,8 @@ + import io + import unittest +-import StringIO +-import cStringIO +-import copy_reg ++import io ++import io ++import copyreg + import sys + + try: +@@ -45,7 +45,7 @@ from zodbpickle import pickletools_2 as pickletools + # for proto in protocols: + # kind of outer loop. 
+ assert pickle.HIGHEST_PROTOCOL == cPickle.HIGHEST_PROTOCOL == 3 +-protocols = range(pickle.HIGHEST_PROTOCOL + 1) ++protocols = list(range(pickle.HIGHEST_PROTOCOL + 1)) + + # Copy of test.test_support.run_with_locale. This is needed to support Python + # 2.4, which didn't include it. This is all to support test_xpickle, which +@@ -78,7 +78,7 @@ def run_with_locale(catstr, *locales): + finally: + if locale and orig_locale: + locale.setlocale(category, orig_locale) +- inner.func_name = func.func_name ++ inner.__name__ = func.__name__ + inner.__doc__ = func.__doc__ + return inner + return decorator +@@ -114,21 +114,21 @@ class ExtensionSaver: + # there is one). + def __init__(self, code): + self.code = code +- if code in copy_reg._inverted_registry: +- self.pair = copy_reg._inverted_registry[code] +- copy_reg.remove_extension(self.pair[0], self.pair[1], code) ++ if code in copyreg._inverted_registry: ++ self.pair = copyreg._inverted_registry[code] ++ copyreg.remove_extension(self.pair[0], self.pair[1], code) + else: + self.pair = None + + # Restore previous registration for code. 
+ def restore(self): + code = self.code +- curpair = copy_reg._inverted_registry.get(code) ++ curpair = copyreg._inverted_registry.get(code) + if curpair is not None: +- copy_reg.remove_extension(curpair[0], curpair[1], code) ++ copyreg.remove_extension(curpair[0], curpair[1], code) + pair = self.pair + if pair is not None: +- copy_reg.add_extension(pair[0], pair[1], code) ++ copyreg.add_extension(pair[0], pair[1], code) + + class C: + def __cmp__(self, other): +@@ -154,8 +154,8 @@ class initarg(C): + class metaclass(type): + pass + +-class use_metaclass(object): +- __metaclass__ = metaclass ++class use_metaclass(object, metaclass=metaclass): ++ pass + + class pickling_metaclass(type): + def __eq__(self, other): +@@ -430,7 +430,7 @@ def create_data(): + c = C() + c.foo = 1 + c.bar = 2 +- x = [0, 1L, 2.0, 3.0+0j] ++ x = [0, 1, 2.0, 3.0+0j] + # Append some integer test cases at cPickle.c's internal size + # cutoffs. + uint1max = 0xff +@@ -498,7 +498,7 @@ class AbstractPickleTests(unittest.TestCase): + + for proto, expected in (0, DATA0_DIS), (1, DATA1_DIS): + s = self.dumps(self._testdata, proto) +- filelike = cStringIO.StringIO() ++ filelike = io.StringIO() + dis(s, out=filelike) + got = filelike.getvalue() + self.assertEqual(expected, got) +@@ -528,7 +528,7 @@ class AbstractPickleTests(unittest.TestCase): + for proto in protocols: + s = self.dumps(d, proto) + x = self.loads(s) +- self.assertEqual(x.keys(), [1]) ++ self.assertEqual(list(x.keys()), [1]) + self.assertTrue(x[1] is x) + + def test_recursive_inst(self): +@@ -551,7 +551,7 @@ class AbstractPickleTests(unittest.TestCase): + x = self.loads(s) + self.assertEqual(len(x), 1) + self.assertEqual(dir(x[0]), dir(i)) +- self.assertEqual(x[0].attr.keys(), [1]) ++ self.assertEqual(list(x[0].attr.keys()), [1]) + self.assertTrue(x[0].attr[1] is x) + + def test_garyp(self): +@@ -576,8 +576,8 @@ class AbstractPickleTests(unittest.TestCase): + + if have_unicode: + def test_unicode(self): +- endcases = [u'', u'<\\u>', 
u'<\\\u1234>', u'<\n>', +- u'<\\>', u'<\\\U00012345>'] ++ endcases = ['', '<\\u>', '<\\\u1234>', '<\n>', ++ '<\\>', '<\\\U00012345>'] + for proto in protocols: + for u in endcases: + p = self.dumps(u, proto) +@@ -585,7 +585,7 @@ class AbstractPickleTests(unittest.TestCase): + self.assertEqual(u2, u) + + def test_unicode_high_plane(self): +- t = u'\U00012345' ++ t = '\U00012345' + for proto in protocols: + p = self.dumps(t, proto) + t2 = self.loads(p) +@@ -594,7 +594,7 @@ class AbstractPickleTests(unittest.TestCase): + def test_ints(self): + import sys + for proto in protocols: +- n = sys.maxint ++ n = sys.maxsize + while n: + for expected in (-n, n): + s = self.dumps(expected, proto) +@@ -603,7 +603,7 @@ class AbstractPickleTests(unittest.TestCase): + n = n >> 1 + + def test_maxint64(self): +- maxint64 = (1L << 63) - 1 ++ maxint64 = (1 << 63) - 1 + data = 'I' + str(maxint64) + '\n.' + got = self.loads(data) + self.assertEqual(got, maxint64) +@@ -616,7 +616,7 @@ class AbstractPickleTests(unittest.TestCase): + for proto in protocols: + # 256 bytes is where LONG4 begins. + for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257: +- nbase = 1L << nbits ++ nbase = 1 << nbits + for npos in nbase-1, nbase, nbase+1: + for n in npos, -npos: + pickle = self.dumps(n, proto) +@@ -624,7 +624,7 @@ class AbstractPickleTests(unittest.TestCase): + self.assertEqual(n, got) + # Try a monster. This is quadratic-time in protos 0 & 1, so don't + # bother with those. 
+- nbase = long("deadbeeffeedface", 16) ++ nbase = int("deadbeeffeedface", 16) + nbase += nbase << 1000000 + for n in nbase, -nbase: + p = self.dumps(n, 2) +@@ -661,7 +661,7 @@ class AbstractPickleTests(unittest.TestCase): + + def test_dynamic_class(self): + a = create_dynamic_class("my_dynamic_class", (object,)) +- copy_reg.pickle(pickling_metaclass, pickling_metaclass.__reduce__) ++ copyreg.pickle(pickling_metaclass, pickling_metaclass.__reduce__) + for proto in protocols: + s = self.dumps(a, proto) + b = self.loads(s) +@@ -702,14 +702,14 @@ class AbstractPickleTests(unittest.TestCase): + badpickle = pickle.PROTO + chr(oob) + build_none + try: + self.loads(badpickle) +- except ValueError, detail: ++ except ValueError as detail: + self.assertTrue(str(detail).startswith( + "unsupported pickle protocol")) + else: + self.fail("expected bad protocol number to raise ValueError") + + def test_long1(self): +- x = 12345678910111213141516178920L ++ x = 12345678910111213141516178920 + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) +@@ -717,7 +717,7 @@ class AbstractPickleTests(unittest.TestCase): + self.assertEqual(opcode_in_pickle(pickle.LONG1, s), proto >= 2) + + def test_long4(self): +- x = 12345678910111213141516178920L << (256*8) ++ x = 12345678910111213141516178920 << (256*8) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) +@@ -847,7 +847,7 @@ class AbstractPickleTests(unittest.TestCase): + def produce_global_ext(self, extcode, opcode): + e = ExtensionSaver(extcode) + try: +- copy_reg.add_extension(__name__, "MyList", extcode) ++ copyreg.add_extension(__name__, "MyList", extcode) + x = MyList([1, 2, 3]) + x.foo = 42 + x.bar = "hello" +@@ -891,7 +891,7 @@ class AbstractPickleTests(unittest.TestCase): + + def test_list_chunking(self): + n = 10 # too small to chunk +- x = range(n) ++ x = list(range(n)) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) +@@ -900,7 +900,7 @@ class 
AbstractPickleTests(unittest.TestCase): + self.assertEqual(num_appends, proto > 0) + + n = 2500 # expect at least two chunks when proto > 0 +- x = range(n) ++ x = list(range(n)) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) +@@ -913,7 +913,7 @@ class AbstractPickleTests(unittest.TestCase): + + def test_dict_chunking(self): + n = 10 # too small to chunk +- x = dict.fromkeys(range(n)) ++ x = dict.fromkeys(list(range(n))) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) +@@ -922,7 +922,7 @@ class AbstractPickleTests(unittest.TestCase): + self.assertEqual(num_setitems, proto > 0) + + n = 2500 # expect at least two chunks when proto > 0 +- x = dict.fromkeys(range(n)) ++ x = dict.fromkeys(list(range(n))) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) +@@ -1025,7 +1025,7 @@ class AbstractPickleTests(unittest.TestCase): + def test_many_puts_and_gets(self): + # Test that internal data structures correctly deal with lots of + # puts/gets. 
+- keys = ("aaa" + str(i) for i in xrange(100)) ++ keys = ("aaa" + str(i) for i in range(100)) + large_dict = dict((k, [4, 5, 6]) for k in keys) + obj = [dict(large_dict), dict(large_dict), dict(large_dict)] + +@@ -1091,7 +1091,7 @@ class REX_three(object): + self._proto = proto + return REX_two, () + def __reduce__(self): +- raise TestFailed, "This __reduce__ shouldn't be called" ++ raise TestFailed("This __reduce__ shouldn't be called") + + class REX_four(object): + _proto = None +@@ -1117,7 +1117,7 @@ class MyInt(int): + sample = 1 + + class MyLong(long): +- sample = 1L ++ sample = 1 + + class MyFloat(float): + sample = 1.0 +@@ -1128,8 +1128,8 @@ class MyComplex(complex): + class MyStr(str): + sample = "hello" + +-class MyUnicode(unicode): +- sample = u"hello \u1234" ++class MyUnicode(str): ++ sample = "hello \u1234" + + class MyTuple(tuple): + sample = (1, 2, 3) +@@ -1175,7 +1175,7 @@ class AbstractPickleModuleTests(unittest.TestCase): + os.remove(TESTFN) + + def test_load_from_and_dump_to_file(self): +- stream = cStringIO.StringIO() ++ stream = io.StringIO() + data = [123, {}, 124] + self.module.dump(data, stream) + stream.seek(0) +@@ -1187,7 +1187,7 @@ class AbstractPickleModuleTests(unittest.TestCase): + self.assertEqual(self.module.HIGHEST_PROTOCOL, 3) + + def test_callapi(self): +- f = cStringIO.StringIO() ++ f = io.StringIO() + # With and without keyword arguments + self.module.dump(123, f, -1) + self.module.dump(123, file=f, protocol=-1) +@@ -1197,7 +1197,7 @@ class AbstractPickleModuleTests(unittest.TestCase): + self.module.Pickler(f, protocol=-1) + + def test_incomplete_input(self): +- s = StringIO.StringIO("X''.") ++ s = io.StringIO("X''.") + self.assertRaises(EOFError, self.module.load, s) + + @skipIf(_is_pypy or _is_jython, "Fails to access the redefined builtins") +@@ -1207,7 +1207,7 @@ class AbstractPickleModuleTests(unittest.TestCase): + '__import__': __import__} + d = {} + teststr = "def f(): pickleme.dumps(0)" +- exec teststr in 
{'__builtins__': builtins}, d ++ exec(teststr, {'__builtins__': builtins}, d) + d['f']() + + def test_bad_input(self): +@@ -1242,7 +1242,7 @@ class AbstractPersistentPicklerTests(unittest.TestCase + def test_persistence(self): + self.id_count = 0 + self.load_count = 0 +- L = range(10) ++ L = list(range(10)) + self.assertEqual(self.loads(self.dumps(L)), L) + self.assertEqual(self.id_count, 5) + self.assertEqual(self.load_count, 5) +@@ -1250,7 +1250,7 @@ class AbstractPersistentPicklerTests(unittest.TestCase + def test_bin_persistence(self): + self.id_count = 0 + self.load_count = 0 +- L = range(10) ++ L = list(range(10)) + self.assertEqual(self.loads(self.dumps(L, 1)), L) + self.assertEqual(self.id_count, 5) + self.assertEqual(self.load_count, 5) +@@ -1282,7 +1282,7 @@ class AbstractPicklerUnpicklerObjectTests(unittest.Tes + # object again, the third serialized form should be identical to the + # first one we obtained. + data = ["abcdefg", "abcdefg", 44] +- f = cStringIO.StringIO() ++ f = io.StringIO() + pickler = self.pickler_class(f) + + pickler.dump(data) +@@ -1309,13 +1309,13 @@ class AbstractPicklerUnpicklerObjectTests(unittest.Tes + def test_priming_pickler_memo(self): + # Verify that we can set the Pickler's memo attribute. + data = ["abcdefg", "abcdefg", 44] +- f = cStringIO.StringIO() ++ f = io.StringIO() + pickler = self.pickler_class(f) + + pickler.dump(data) + first_pickled = f.getvalue() + +- f = cStringIO.StringIO() ++ f = io.StringIO() + primed = self.pickler_class(f) + primed.memo = pickler.memo + +@@ -1327,25 +1327,25 @@ class AbstractPicklerUnpicklerObjectTests(unittest.Tes + def test_priming_unpickler_memo(self): + # Verify that we can set the Unpickler's memo attribute. 
+ data = ["abcdefg", "abcdefg", 44] +- f = cStringIO.StringIO() ++ f = io.StringIO() + pickler = self.pickler_class(f) + + pickler.dump(data) + first_pickled = f.getvalue() + +- f = cStringIO.StringIO() ++ f = io.StringIO() + primed = self.pickler_class(f) + primed.memo = pickler.memo + + primed.dump(data) + primed_pickled = f.getvalue() + +- unpickler = self.unpickler_class(cStringIO.StringIO(first_pickled)) ++ unpickler = self.unpickler_class(io.StringIO(first_pickled)) + unpickled_data1 = unpickler.load() + + self.assertEqual(unpickled_data1, data) + +- primed = self.unpickler_class(cStringIO.StringIO(primed_pickled)) ++ primed = self.unpickler_class(io.StringIO(primed_pickled)) + primed.memo = unpickler.memo + unpickled_data2 = primed.load() + +@@ -1356,18 +1356,18 @@ class AbstractPicklerUnpicklerObjectTests(unittest.Tes + + def test_reusing_unpickler_objects(self): + data1 = ["abcdefg", "abcdefg", 44] +- f = cStringIO.StringIO() ++ f = io.StringIO() + pickler = self.pickler_class(f) + pickler.dump(data1) + pickled1 = f.getvalue() + + data2 = ["abcdefg", 44, 44] +- f = cStringIO.StringIO() ++ f = io.StringIO() + pickler = self.pickler_class(f) + pickler.dump(data2) + pickled2 = f.getvalue() + +- f = cStringIO.StringIO() ++ f = io.StringIO() + f.write(pickled1) + f.seek(0) + unpickler = self.unpickler_class(f) +--- src/zodbpickle/tests/pickletester_3.py.orig 2021-09-29 06:13:32 UTC ++++ src/zodbpickle/tests/pickletester_3.py +@@ -39,7 +39,7 @@ from . import _is_pypy + # Tests that try a number of pickle protocols should have a + # for proto in protocols: + # kind of outer loop. 
+-protocols = range(pickle.HIGHEST_PROTOCOL + 1) ++protocols = list(range(pickle.HIGHEST_PROTOCOL + 1)) + + ascii_char_size = 1 + +@@ -650,10 +650,10 @@ class AbstractPickleTests(unittest.TestCase): + self.assertRaises(ValueError, self.loads, buf) + + def test_unicode(self): +- endcases = ['', '<\\u>', '<\\\u1234>', '<\n>', +- '<\\>', '<\\\U00012345>', ++ endcases = ['', '<\\u>', '<\\\\u1234>', '<\n>', ++ '<\\>', '<\\\\U00012345>', + # surrogates +- '<\udc80>'] ++ '<\\udc80>'] + for proto in protocols: + for u in endcases: + p = self.dumps(u, proto) +@@ -661,7 +661,7 @@ class AbstractPickleTests(unittest.TestCase): + self.assertEqual(u2, u) + + def test_unicode_high_plane(self): +- t = '\U00012345' ++ t = '\\U00012345' + for proto in protocols: + p = self.dumps(t, proto) + t2 = self.loads(p) +@@ -1017,7 +1017,7 @@ class AbstractPickleTests(unittest.TestCase): + + def test_dict_chunking(self): + n = 10 # too small to chunk +- x = dict.fromkeys(range(n)) ++ x = dict.fromkeys(list(range(n))) + for proto in protocols: + s = self.dumps(x, proto) + self.assertIsInstance(s, bytes_types) +@@ -1027,7 +1027,7 @@ class AbstractPickleTests(unittest.TestCase): + self.assertEqual(num_setitems, proto > 0) + + n = 2500 # expect at least two chunks when proto > 0 +- x = dict.fromkeys(range(n)) ++ x = dict.fromkeys(list(range(n))) + for proto in protocols: + s = self.dumps(x, proto) + y = self.loads(s) +@@ -1170,7 +1170,7 @@ class AbstractPickleTests(unittest.TestCase): + loaded = self.loads(DATA3) + self.assertEqual(loaded, set([1, 2])) + loaded = self.loads(DATA4) +- self.assertEqual(type(loaded), type(range(0))) ++ self.assertEqual(type(loaded), type(list(range(0)))) + self.assertEqual(list(loaded), list(range(5))) + loaded = self.loads(DATA5) + self.assertEqual(type(loaded), SimpleCookie) +@@ -1189,7 +1189,7 @@ class AbstractPickleTests(unittest.TestCase): + # the same result as Python 2.x did. 
+ # NOTE: this test is a bit too strong since we can produce different + # bytecode that 2.x will still understand. +- dumped = self.dumps(range(5), 2) ++ dumped = self.dumps(list(range(5)), 2) + self.assertEqual(dumped, DATA4) + + dumped = self.dumps(set([3]), 2) +@@ -1328,24 +1328,24 @@ class AbstractBytestrTests(unittest.TestCase): + + def test_load_unicode_protocol_0(self): + """ Test unicode with protocol=0 +- python 2: pickle.dumps(u"\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440", protocol=0) """ ++ python 2: pickle.dumps(u"\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440", protocol=0) """ + self.unpickleEqual( + b'V\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440\np0\n.', +- '\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440') ++ '\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440') + + def test_load_unicode_protocol_1(self): + """ Test unicode with protocol=1 +- python 2: pickle.dumps(u"\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440", protocol=1) """ ++ python 2: pickle.dumps(u"\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440", protocol=1) """ + self.unpickleEqual( + b'X\x12\x00\x00\x00\xd0\x9a\xd0\xbe\xd0\xbc\xd0\xbf\xd1\x8c\xd1\x8e\xd1\x82\xd0\xb5\xd1\x80q\x00.', +- '\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440') ++ '\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440') + + def test_load_unicode_protocol_2(self): + """ Test unicode with protocol=1 +- python 2: pickle.dumps(u"\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440", protocol=2) """ ++ python 2: pickle.dumps(u"\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440", protocol=2) """ + self.unpickleEqual( + b'\x80\x02X\x12\x00\x00\x00\xd0\x9a\xd0\xbe\xd0\xbc\xd0\xbf\xd1\x8c\xd1\x8e\xd1\x82\xd0\xb5\xd1\x80q\x00.', +- '\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440') ++ '\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440') + + def 
test_load_long_str_protocol_1(self): + """ Test long str with protocol=1 +@@ -1511,7 +1511,7 @@ class REX_seven(object): + def __setitem__(self, key, value): + self.table[key] = value + def __reduce__(self): +- return type(self), (), None, None, iter(self.table.items()) ++ return type(self), (), None, None, iter(list(self.table.items())) + + + # Test classes for newobj +@@ -1529,7 +1529,7 @@ class MyStr(str): + sample = "hello" + + class MyUnicode(str): +- sample = "hello \u1234" ++ sample = "hello \\u1234" + + class MyTuple(tuple): + sample = (1, 2, 3) +@@ -2028,14 +2028,14 @@ if __name__ == "__main__": + x = create_data() + for i in range(3): + p = pickle.dumps(x, i) +- print("DATA{0} = (".format(i)) ++ print(("DATA{0} = (".format(i))) + for j in range(0, len(p), 20): + b = bytes(p[j:j+20]) +- print(" {0!r}".format(b)) ++ print((" {0!r}".format(b))) + print(")") + print() +- print("# Disassembly of DATA{0}".format(i)) +- print("DATA{0}_DIS = \"\"\"\\".format(i)) ++ print(("# Disassembly of DATA{0}".format(i))) ++ print(("DATA{0}_DIS = \"\"\"\\".format(i))) + dis(p) + print("\"\"\"") + print() +--- src/zodbpickle/tests/test_pickle_2.py.orig 2021-09-29 06:13:32 UTC ++++ src/zodbpickle/tests/test_pickle_2.py +@@ -1,7 +1,7 @@ +-import cStringIO + import io ++import io + import unittest +-from cStringIO import StringIO ++from io import StringIO + + from .pickletester_2 import (AbstractPickleTests, + AbstractPickleModuleTests, +@@ -13,7 +13,7 @@ from .pickletester_2 import (AbstractPickleTests, + from test import test_support + + class cStringIOMixin: +- output = input = cStringIO.StringIO ++ output = input = io.StringIO + + def close(self, f): + pass +@@ -70,7 +70,7 @@ class PicklerTests(AbstractPickleTests): + + def dumps(self, arg, proto=0, fast=0): + from zodbpickle.pickle_2 import Pickler +- f = cStringIO.StringIO() ++ f = io.StringIO() + p = Pickler(f, proto) + if fast: + p.fast = fast +@@ -80,7 +80,7 @@ class PicklerTests(AbstractPickleTests): + + def 
loads(self, buf): + from zodbpickle.pickle_2 import Unpickler +- f = cStringIO.StringIO(buf) ++ f = io.StringIO(buf) + u = Unpickler(f) + return u.load() + +@@ -92,7 +92,7 @@ class PersPicklerTests(AbstractPersistentPicklerTests) + class PersPickler(Pickler): + def persistent_id(subself, obj): + return self.persistent_id(obj) +- f = cStringIO.StringIO() ++ f = io.StringIO() + p = PersPickler(f, proto) + if fast: + p.fast = fast +@@ -105,7 +105,7 @@ class PersPicklerTests(AbstractPersistentPicklerTests) + class PersUnpickler(Unpickler): + def persistent_load(subself, obj): + return self.persistent_load(obj) +- f = cStringIO.StringIO(buf) ++ f = io.StringIO(buf) + u = PersUnpickler(f) + return u.load() + diff --git a/databases/qof/Makefile b/databases/qof/Makefile index 1551116153e..d64764cfd57 100644 --- a/databases/qof/Makefile +++ b/databases/qof/Makefile @@ -1,6 +1,6 @@ PORTNAME= qof PORTVERSION= 0.8.8 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= databases MASTER_SITES= DEBIAN_POOL DISTNAME= qof_${PORTVERSION}.orig diff --git a/databases/rocksdb/Makefile b/databases/rocksdb/Makefile index 21fa7dde412..1a6c06ac22a 100644 --- a/databases/rocksdb/Makefile +++ b/databases/rocksdb/Makefile @@ -1,7 +1,7 @@ -# Created by: Sunpoet Po-Chuan Hsieh +# Created by: Po-Chuan Hsieh PORTNAME= rocksdb -PORTVERSION= 6.29.3 +PORTVERSION= 7.0.1 DISTVERSIONPREFIX= v CATEGORIES= databases @@ -22,7 +22,7 @@ BUILD_DEPENDS= bash:shells/bash LIB_DEPENDS= libgflags.so:devel/gflags \ libsnappy.so:archivers/snappy -USES= compiler:c++11-lib gmake localbase:ldflags perl5 +USES= compiler:c++17-lang gmake localbase:ldflags perl5 USE_PERL5= build ALL_TARGET= shared_lib static_lib all @@ -32,7 +32,7 @@ CONFIGURE_ENV= PORTABLE=0 ROCKSDB_ROOT=${WRKSRC} CONFIGURE_SCRIPT= build_tools/build_detect_platform HAS_CONFIGURE= yes MAKE_ENV= CXX=${CXX} INSTALL_PATH=${STAGEDIR}${PREFIX} USE_RTTI=1 -USE_CXXSTD= c++11 +USE_CXXSTD= c++17 USE_LDCONFIG= yes TEST_TARGET= check diff --git 
a/databases/rocksdb/distinfo b/databases/rocksdb/distinfo index 9b128387c2d..51cea5b8e70 100644 --- a/databases/rocksdb/distinfo +++ b/databases/rocksdb/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057872 -SHA256 (facebook-rocksdb-v6.29.3_GH0.tar.gz) = 724e4cba2db6668ff6a21ecabcce0782cd0c8e386796e7e9a14a8260e0600abd -SIZE (facebook-rocksdb-v6.29.3_GH0.tar.gz) = 9502512 +TIMESTAMP = 1647264422 +SHA256 (facebook-rocksdb-v7.0.1_GH0.tar.gz) = f1547be4dd76ca30d74e8d1377b893d36ecb7b526ffd835638ad34feb79be174 +SIZE (facebook-rocksdb-v7.0.1_GH0.tar.gz) = 9501192 diff --git a/databases/rocksdb/files/patch-Makefile b/databases/rocksdb/files/patch-Makefile index 347153273ec..2c820e4c7ab 100644 --- a/databases/rocksdb/files/patch-Makefile +++ b/databases/rocksdb/files/patch-Makefile @@ -1,6 +1,6 @@ ---- Makefile.orig 2021-06-25 21:15:04 UTC +--- Makefile.orig 2022-03-10 06:51:01 UTC +++ Makefile -@@ -231,16 +231,16 @@ AM_SHARE = $(AM_V_CCLD) $(CXX) $(PLATFORM_SHARED_LDFLA +@@ -220,17 +220,17 @@ AM_SHARE = $(AM_V_CCLD) $(CXX) $(PLATFORM_SHARED_LDFLA # Detect what platform we're building on. # Export some common variables that might have been passed as Make variables # instead of environment variables. 
@@ -13,6 +13,7 @@ - export PORTABLE="$(PORTABLE)"; \ - export ROCKSDB_NO_FBCODE="$(ROCKSDB_NO_FBCODE)"; \ - export USE_CLANG="$(USE_CLANG)"; \ +- export LIB_MODE="$(LIB_MODE)"; \ - "$(CURDIR)/build_tools/build_detect_platform" "$(CURDIR)/make_config.mk")) +#dummy := $(shell (export ROCKSDB_ROOT="$(CURDIR)"; \ +# export CXXFLAGS="$(EXTRA_CXXFLAGS)"; \ @@ -23,11 +24,12 @@ +# export PORTABLE="$(PORTABLE)"; \ +# export ROCKSDB_NO_FBCODE="$(ROCKSDB_NO_FBCODE)"; \ +# export USE_CLANG="$(USE_CLANG)"; \ ++# export LIB_MODE="$(LIB_MODE)"; \ +# "$(CURDIR)/build_tools/build_detect_platform" "$(CURDIR)/make_config.mk")) # this file is generated by the previous line to set build flags and sources include make_config.mk -@@ -715,7 +715,7 @@ endif # PLATFORM_SHARED_EXT +@@ -796,7 +796,7 @@ endif # PLATFORM_SHARED_EXT blackbox_crash_test_with_ts whitebox_crash_test_with_ts diff --git a/databases/rocksdb/pkg-plist b/databases/rocksdb/pkg-plist index c95799a4ae9..4cb9faddb86 100644 --- a/databases/rocksdb/pkg-plist +++ b/databases/rocksdb/pkg-plist @@ -80,14 +80,12 @@ include/rocksdb/types.h include/rocksdb/unique_id.h include/rocksdb/universal_compaction.h include/rocksdb/utilities/backup_engine.h -include/rocksdb/utilities/backupable_db.h include/rocksdb/utilities/cache_dump_load.h include/rocksdb/utilities/checkpoint.h include/rocksdb/utilities/convenience.h include/rocksdb/utilities/customizable_util.h include/rocksdb/utilities/db_ttl.h include/rocksdb/utilities/debug.h -include/rocksdb/utilities/env_librados.h include/rocksdb/utilities/env_mirror.h include/rocksdb/utilities/info_log_finder.h include/rocksdb/utilities/ldb_cmd.h @@ -101,7 +99,6 @@ include/rocksdb/utilities/optimistic_transaction_db.h include/rocksdb/utilities/option_change_migration.h include/rocksdb/utilities/options_type.h include/rocksdb/utilities/options_util.h -include/rocksdb/utilities/regex.h include/rocksdb/utilities/replayer.h include/rocksdb/utilities/sim_cache.h include/rocksdb/utilities/stackable_db.h 
@@ -109,7 +106,6 @@ include/rocksdb/utilities/table_properties_collectors.h include/rocksdb/utilities/transaction.h include/rocksdb/utilities/transaction_db.h include/rocksdb/utilities/transaction_db_mutex.h -include/rocksdb/utilities/utility_db.h include/rocksdb/utilities/write_batch_with_index.h include/rocksdb/version.h include/rocksdb/wal_filter.h @@ -118,13 +114,13 @@ include/rocksdb/write_batch_base.h include/rocksdb/write_buffer_manager.h %%LITE%%lib/librocksdb-lite.a %%LITE%%lib/librocksdb-lite.so -%%LITE%%lib/librocksdb-lite.so.6 +%%LITE%%lib/librocksdb-lite.so.7 %%LITE%%lib/librocksdb-lite.so.%%SHLIB_VER%% %%LITE%%lib/librocksdb-lite.so.%%PORTVERSION%% %%LITE%%lib/librocksdb-lite_tools.so %%NO_LITE%%lib/librocksdb.a %%NO_LITE%%lib/librocksdb.so -%%NO_LITE%%lib/librocksdb.so.6 +%%NO_LITE%%lib/librocksdb.so.7 %%NO_LITE%%lib/librocksdb.so.%%SHLIB_VER%% %%NO_LITE%%lib/librocksdb.so.%%PORTVERSION%% %%NO_LITE%%lib/librocksdb_tools.so diff --git a/databases/rubygem-activemodel52/Makefile b/databases/rubygem-activemodel52/Makefile index 8cc182d5a76..bfea30ecb42 100644 --- a/databases/rubygem-activemodel52/Makefile +++ b/databases/rubygem-activemodel52/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= activemodel -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/databases/rubygem-activemodel52/distinfo b/databases/rubygem-activemodel52/distinfo index de37fe37647..65dd1c76aa7 100644 --- a/databases/rubygem-activemodel52/distinfo +++ b/databases/rubygem-activemodel52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298694 -SHA256 (rubygem/activemodel-5.2.6.gem) = 467f960c0d787a825e21687619d131acbcbaf645add91c5b34b5a1fc2d9b48e4 -SIZE (rubygem/activemodel-5.2.6.gem) = 56832 +TIMESTAMP = 1647264886 +SHA256 (rubygem/activemodel-5.2.7.gem) = 52a9c9ac244b565fe768e5b3da9de390c68f58043b641b9fef1b37f51078f810 +SIZE (rubygem/activemodel-5.2.7.gem) = 56832 diff --git 
a/databases/rubygem-activemodel60/Makefile b/databases/rubygem-activemodel60/Makefile index af8334d8f19..9d3ca1a67ee 100644 --- a/databases/rubygem-activemodel60/Makefile +++ b/databases/rubygem-activemodel60/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= activemodel -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/databases/rubygem-activemodel60/distinfo b/databases/rubygem-activemodel60/distinfo index bcdc78b1cf6..85a2b43bbcb 100644 --- a/databases/rubygem-activemodel60/distinfo +++ b/databases/rubygem-activemodel60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058902 -SHA256 (rubygem/activemodel-6.0.4.6.gem) = a4967be0d71179c60ac5a68394aa81a45d50cda6b8fa536bd6592ae7af558482 -SIZE (rubygem/activemodel-6.0.4.6.gem) = 58880 +TIMESTAMP = 1647264908 +SHA256 (rubygem/activemodel-6.0.4.7.gem) = 1d46fcfc1f35852470dd647cfcfaf1e3e254496df877b83768d0b90c6c68f64f +SIZE (rubygem/activemodel-6.0.4.7.gem) = 58880 diff --git a/databases/rubygem-activemodel61/Makefile b/databases/rubygem-activemodel61/Makefile index aeb2d4c8206..e31b2d4e2f9 100644 --- a/databases/rubygem-activemodel61/Makefile +++ b/databases/rubygem-activemodel61/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= activemodel -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/databases/rubygem-activemodel61/distinfo b/databases/rubygem-activemodel61/distinfo index d78a1446e9d..04b3b836c67 100644 --- a/databases/rubygem-activemodel61/distinfo +++ b/databases/rubygem-activemodel61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058928 -SHA256 (rubygem/activemodel-6.1.4.6.gem) = 9b02d78ea5f97d0de898dc0ef975a344cea6c8216089f3ebd84f85371e52f947 -SIZE (rubygem/activemodel-6.1.4.6.gem) = 62464 +TIMESTAMP = 1647264934 +SHA256 (rubygem/activemodel-6.1.4.7.gem) = 3bc79221972b8ce4e9aafc1abcd2a62b06bfce91cba43b048893f5764b7ebf06 +SIZE 
(rubygem/activemodel-6.1.4.7.gem) = 62464 diff --git a/databases/rubygem-activemodel70/Makefile b/databases/rubygem-activemodel70/Makefile index accedd87703..f4cc02f7995 100644 --- a/databases/rubygem-activemodel70/Makefile +++ b/databases/rubygem-activemodel70/Makefile @@ -1,5 +1,5 @@ PORTNAME= activemodel -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/databases/rubygem-activemodel70/distinfo b/databases/rubygem-activemodel70/distinfo index 767453b270d..3ef5a06bb3f 100644 --- a/databases/rubygem-activemodel70/distinfo +++ b/databases/rubygem-activemodel70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058954 -SHA256 (rubygem/activemodel-7.0.2.gem) = 6f73399177c3632fd57f763341b2acb8d8dbb7074561c89d9a11d3175d94d147 -SIZE (rubygem/activemodel-7.0.2.gem) = 60928 +TIMESTAMP = 1647264960 +SHA256 (rubygem/activemodel-7.0.2.3.gem) = 9c2cf8dfa4cc2e4bf0401a1eaa534d6d5f6921a1d912e0c7ddee1ba5fa2946e4 +SIZE (rubygem/activemodel-7.0.2.3.gem) = 60928 diff --git a/databases/rubygem-activerecord52/Makefile b/databases/rubygem-activerecord52/Makefile index 198c834efe0..0357b7353f4 100644 --- a/databases/rubygem-activerecord52/Makefile +++ b/databases/rubygem-activerecord52/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= activerecord -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/databases/rubygem-activerecord52/distinfo b/databases/rubygem-activerecord52/distinfo index e47542ae97a..623adc50567 100644 --- a/databases/rubygem-activerecord52/distinfo +++ b/databases/rubygem-activerecord52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298696 -SHA256 (rubygem/activerecord-5.2.6.gem) = ecbb022ce4b00c95323ce3bd8eedbd4d98e88497efb6d0c637b4f60759b41817 -SIZE (rubygem/activerecord-5.2.6.gem) = 358400 +TIMESTAMP = 1647264888 +SHA256 (rubygem/activerecord-5.2.7.gem) = 0a31e9015ce8d60a1a8c11b80b88a1adce21e1b47a08345d9d51447343a1517a +SIZE 
(rubygem/activerecord-5.2.7.gem) = 358400 diff --git a/databases/rubygem-activerecord60/Makefile b/databases/rubygem-activerecord60/Makefile index 9b356da8bde..9990ec63dc0 100644 --- a/databases/rubygem-activerecord60/Makefile +++ b/databases/rubygem-activerecord60/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= activerecord -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/databases/rubygem-activerecord60/distinfo b/databases/rubygem-activerecord60/distinfo index 68b81dec5df..13aca2e19b1 100644 --- a/databases/rubygem-activerecord60/distinfo +++ b/databases/rubygem-activerecord60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058904 -SHA256 (rubygem/activerecord-6.0.4.6.gem) = 4049080170d21dc70be26593578ca06c994a502094c1ac3ad8516aaf9259322a -SIZE (rubygem/activerecord-6.0.4.6.gem) = 410624 +TIMESTAMP = 1647264910 +SHA256 (rubygem/activerecord-6.0.4.7.gem) = 583c33b2ddd483b4bc9b71188b2fe75b352cd62d65f23191f73428d3580d7ca6 +SIZE (rubygem/activerecord-6.0.4.7.gem) = 410624 diff --git a/databases/rubygem-activerecord61/Makefile b/databases/rubygem-activerecord61/Makefile index aba7669a078..32264890b47 100644 --- a/databases/rubygem-activerecord61/Makefile +++ b/databases/rubygem-activerecord61/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= activerecord -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/databases/rubygem-activerecord61/distinfo b/databases/rubygem-activerecord61/distinfo index f7bf34e20f3..6eb8c66866e 100644 --- a/databases/rubygem-activerecord61/distinfo +++ b/databases/rubygem-activerecord61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058930 -SHA256 (rubygem/activerecord-6.1.4.6.gem) = 5aebe8546275d7a027fb1fd9a59576882f5cfaa34031c429ff5e88202e6f6097 -SIZE (rubygem/activerecord-6.1.4.6.gem) = 430592 +TIMESTAMP = 1647264936 +SHA256 (rubygem/activerecord-6.1.4.7.gem) = 
341dd4354e7788f190cb9501f50982361e44a5c7f059adb5641ffe7620cfb1c5 +SIZE (rubygem/activerecord-6.1.4.7.gem) = 430592 diff --git a/databases/rubygem-activerecord70/Makefile b/databases/rubygem-activerecord70/Makefile index 98e59231ffd..3766de4a7bf 100644 --- a/databases/rubygem-activerecord70/Makefile +++ b/databases/rubygem-activerecord70/Makefile @@ -1,5 +1,5 @@ PORTNAME= activerecord -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= databases rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/databases/rubygem-activerecord70/distinfo b/databases/rubygem-activerecord70/distinfo index b49696b49d0..c90567d7e71 100644 --- a/databases/rubygem-activerecord70/distinfo +++ b/databases/rubygem-activerecord70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058956 -SHA256 (rubygem/activerecord-7.0.2.gem) = 44a6b09afb8cdf193b138568c5a32f5eab58d7d1f17dc539e746467a0695d0ea -SIZE (rubygem/activerecord-7.0.2.gem) = 473088 +TIMESTAMP = 1647264962 +SHA256 (rubygem/activerecord-7.0.2.3.gem) = 28e860d0f53c59813e581a07f8ccf7be0142c1c1bbd21deb1f83470a9f6c9280 +SIZE (rubygem/activerecord-7.0.2.3.gem) = 473088 diff --git a/databases/sequeler/Makefile b/databases/sequeler/Makefile index 5cc0afaecc0..fe0fa6177ea 100644 --- a/databases/sequeler/Makefile +++ b/databases/sequeler/Makefile @@ -1,7 +1,7 @@ PORTNAME= sequeler DISTVERSIONPREFIX= v DISTVERSION= 0.8.2 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= databases MAINTAINER= ports@FreeBSD.org diff --git a/databases/spatialite-tools/Makefile b/databases/spatialite-tools/Makefile index bc54d41efac..7cbd4fa9fdf 100644 --- a/databases/spatialite-tools/Makefile +++ b/databases/spatialite-tools/Makefile @@ -1,5 +1,6 @@ PORTNAME= spatialite-tools PORTVERSION= 5.0.1 +PORTREVISION= 1 CATEGORIES= databases geography MASTER_SITES= http://www.gaia-gis.it/gaia-sins/ diff --git a/databases/spatialite/Makefile b/databases/spatialite/Makefile index 45c99d7df53..5b838fc8bb7 100644 --- a/databases/spatialite/Makefile +++ b/databases/spatialite/Makefile 
@@ -2,6 +2,7 @@ PORTNAME= spatialite PORTVERSION= 5.0.1 +PORTREVISION= 1 CATEGORIES= databases geography MASTER_SITES= http://www.gaia-gis.it/gaia-sins/libspatialite-sources/ DISTNAME= lib${PORTNAME}-${PORTVERSION} diff --git a/databases/spatialite_gui/Makefile b/databases/spatialite_gui/Makefile index ad434245fcf..2423d827ab3 100644 --- a/databases/spatialite_gui/Makefile +++ b/databases/spatialite_gui/Makefile @@ -2,6 +2,7 @@ PORTNAME= spatialite_gui DISTVERSION= 2.1.0 +PORTREVISION= 1 DISTVERSIONSUFFIX= -beta1 CATEGORIES= databases geography MASTER_SITES= http://www.gaia-gis.it/gaia-sins/spatialite-gui-sources/ diff --git a/databases/sqlitebrowser/Makefile b/databases/sqlitebrowser/Makefile index 8749c80a6ea..7264c45c930 100644 --- a/databases/sqlitebrowser/Makefile +++ b/databases/sqlitebrowser/Makefile @@ -2,7 +2,7 @@ PORTNAME= sqlitebrowser PORTVERSION= 3.12.1 -PORTREVISION= 6 +PORTREVISION= 7 DISTVERSIONPREFIX= v CATEGORIES= databases diff --git a/databases/sqliteodbc/Makefile b/databases/sqliteodbc/Makefile index 6354ee18a22..545cabcba9c 100644 --- a/databases/sqliteodbc/Makefile +++ b/databases/sqliteodbc/Makefile @@ -1,6 +1,6 @@ PORTNAME= sqliteodbc PORTVERSION= 0.9998 -PORTREVISION?= 0 +PORTREVISION?= 1 CATEGORIES= databases MASTER_SITES= http://www.ch-werner.de/sqliteodbc/ diff --git a/databases/tiledb/Makefile b/databases/tiledb/Makefile index 23891a48592..e3a3b44de0d 100644 --- a/databases/tiledb/Makefile +++ b/databases/tiledb/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= tiledb -PORTVERSION= 2.6.2 +PORTVERSION= 2.6.4 CATEGORIES= databases MAINTAINER= sunpoet@FreeBSD.org diff --git a/databases/tiledb/distinfo b/databases/tiledb/distinfo index bf96675b444..77bc5048b62 100644 --- a/databases/tiledb/distinfo +++ b/databases/tiledb/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643970976 -SHA256 (TileDB-Inc-TileDB-2.6.2_GH0.tar.gz) = 6974dd4e53edabda75cc8fa6b5772c71d6adb3eaafa76397e0542b3606f6f783 -SIZE (TileDB-Inc-TileDB-2.6.2_GH0.tar.gz) = 
2137413 +TIMESTAMP = 1647264424 +SHA256 (TileDB-Inc-TileDB-2.6.4_GH0.tar.gz) = ef1a3b1e1c380994c2ca3e4b2691ef7a38319b181bde286075c09d0556bd45a9 +SIZE (TileDB-Inc-TileDB-2.6.4_GH0.tar.gz) = 2138264 diff --git a/databases/virtuoso/Makefile b/databases/virtuoso/Makefile index 61fec305097..7572488bf39 100644 --- a/databases/virtuoso/Makefile +++ b/databases/virtuoso/Makefile @@ -2,7 +2,7 @@ PORTNAME= virtuoso PORTVERSION= 7.2.5 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= databases MASTER_SITES= SF DISTNAME= ${PORTNAME}-opensource-${PORTVERSION} diff --git a/databases/xrootd/Makefile b/databases/xrootd/Makefile index 313a19dfb77..aa336330d7d 100644 --- a/databases/xrootd/Makefile +++ b/databases/xrootd/Makefile @@ -1,6 +1,6 @@ PORTNAME= xrootd DISTVERSION= 4.10.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= databases MASTER_SITES= http://xrootd.org/download/v${DISTVERSION}/ diff --git a/deskutils/alarm-clock-applet/Makefile b/deskutils/alarm-clock-applet/Makefile index 64a09fdbf1c..0455a09ce43 100644 --- a/deskutils/alarm-clock-applet/Makefile +++ b/deskutils/alarm-clock-applet/Makefile @@ -1,5 +1,6 @@ PORTNAME= alarm-clock-applet PORTVERSION= 0.3.4 +PORTREVISION= 1 CATEGORIES= deskutils MASTER_SITES= http://launchpad.net/alarm-clock/trunk/0.3.4/+download/ diff --git a/deskutils/bijiben/Makefile b/deskutils/bijiben/Makefile index 0f96f2bae70..9ead8c3fe34 100644 --- a/deskutils/bijiben/Makefile +++ b/deskutils/bijiben/Makefile @@ -2,7 +2,7 @@ PORTNAME= bijiben PORTVERSION= 40.1 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/deskutils/bookworm/Makefile b/deskutils/bookworm/Makefile index d18d3294607..258845d07a6 100644 --- a/deskutils/bookworm/Makefile +++ b/deskutils/bookworm/Makefile @@ -1,6 +1,6 @@ PORTNAME= bookworm DISTVERSION= 1.1.2 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= deskutils MAINTAINER= miguel@gocobachi.dev diff --git 
a/deskutils/cairo-dock-plugins/Makefile b/deskutils/cairo-dock-plugins/Makefile index 31d5047fa40..6a0f2ee9798 100644 --- a/deskutils/cairo-dock-plugins/Makefile +++ b/deskutils/cairo-dock-plugins/Makefile @@ -1,6 +1,6 @@ PORTNAME= cairo-dock-plugins PORTVERSION= 3.4.1 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= deskutils PATCH_SITES= https://github.com/Cairo-Dock/cairo-dock-plug-ins/commit/ diff --git a/deskutils/cairo-dock/Makefile b/deskutils/cairo-dock/Makefile index 31e6c29b35f..761c447ec71 100644 --- a/deskutils/cairo-dock/Makefile +++ b/deskutils/cairo-dock/Makefile @@ -1,6 +1,6 @@ PORTNAME= cairo-dock PORTVERSION= 3.4.1 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= deskutils MAINTAINER= ports@FreeBSD.org diff --git a/deskutils/easystroke/Makefile b/deskutils/easystroke/Makefile index df88c83ee97..b01adf00df1 100644 --- a/deskutils/easystroke/Makefile +++ b/deskutils/easystroke/Makefile @@ -1,6 +1,6 @@ PORTNAME= easystroke PORTVERSION= 0.6.0 -PORTREVISION= 22 +PORTREVISION= 23 CATEGORIES= deskutils MASTER_SITES= SF diff --git a/deskutils/elementary-calendar/Makefile b/deskutils/elementary-calendar/Makefile index a4118f159d9..16e9bb1bb12 100644 --- a/deskutils/elementary-calendar/Makefile +++ b/deskutils/elementary-calendar/Makefile @@ -1,5 +1,6 @@ PORTNAME= calendar DISTVERSION= 6.1.0 +PORTREVISION= 1 CATEGORIES= deskutils PKGNAMEPREFIX= elementary- diff --git a/deskutils/genius/Makefile b/deskutils/genius/Makefile index 0b73ea9110f..a67de8de1dc 100644 --- a/deskutils/genius/Makefile +++ b/deskutils/genius/Makefile @@ -2,7 +2,7 @@ PORTNAME= genius DISTVERSION= 1.0.27 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= deskutils gnome MASTER_SITES= GNOME \ http://ftp.5z.com/pub/genius/ diff --git a/deskutils/gnome-calendar/Makefile b/deskutils/gnome-calendar/Makefile index 4a5c727b1e4..2a44d126f06 100644 --- a/deskutils/gnome-calendar/Makefile +++ b/deskutils/gnome-calendar/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-calendar PORTVERSION= 42.0 +PORTREVISION= 1 
CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/deskutils/gnome-contacts/Makefile b/deskutils/gnome-contacts/Makefile index 76ca8f5aa30..b9f8fb0ba21 100644 --- a/deskutils/gnome-contacts/Makefile +++ b/deskutils/gnome-contacts/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-contacts PORTVERSION= 41.0 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} diff --git a/deskutils/gnome-dictionary/Makefile b/deskutils/gnome-dictionary/Makefile index 33f93dcabb8..880ce34a9c7 100644 --- a/deskutils/gnome-dictionary/Makefile +++ b/deskutils/gnome-dictionary/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-dictionary PORTVERSION= 40.0 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome3 diff --git a/deskutils/gnome-documents/Makefile b/deskutils/gnome-documents/Makefile index bc5f5cbf23e..46da357efcf 100644 --- a/deskutils/gnome-documents/Makefile +++ b/deskutils/gnome-documents/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnome-documents PORTVERSION= 3.34.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= deskutils gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/deskutils/gnome-initial-setup/Makefile b/deskutils/gnome-initial-setup/Makefile index 5d49fffdc09..66b2b050f92 100644 --- a/deskutils/gnome-initial-setup/Makefile +++ b/deskutils/gnome-initial-setup/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-initial-setup PORTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome3 @@ -20,7 +21,7 @@ LIB_DEPENDS= libaccountsservice.so:sysutils/accountsservice \ RUN_DEPENDS= gdm>=3.0.0:x11/gdm \ krb5>0:security/krb5 -USES= compiler:c++11-lib gnome meson pathfix pkgconfig tar:xz +USES= compiler:c++11-lib gettext gnome meson pathfix pkgconfig tar:xz USE_GNOME= gnomedesktop3 
gtk30 intlhack MESON_ARGS= -Dcheese=disabled \ diff --git a/deskutils/gnome-initial-setup/pkg-plist b/deskutils/gnome-initial-setup/pkg-plist index eb719ed14df..7db977dbfc3 100644 --- a/deskutils/gnome-initial-setup/pkg-plist +++ b/deskutils/gnome-initial-setup/pkg-plist @@ -1,5 +1,81 @@ +etc/xdg/autostart/gnome-initial-setup-copy-worker.desktop +etc/xdg/autostart/gnome-initial-setup-first-login.desktop libexec/gnome-initial-setup libexec/gnome-initial-setup-copy-worker +share/applications/gnome-initial-setup.desktop share/gnome-session/sessions/gnome-initial-setup.session share/gnome-shell/modes/initial-setup.json +share/locale/af/LC_MESSAGES/gnome-initial-setup.mo +share/locale/an/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ar/LC_MESSAGES/gnome-initial-setup.mo +share/locale/as/LC_MESSAGES/gnome-initial-setup.mo +share/locale/be/LC_MESSAGES/gnome-initial-setup.mo +share/locale/bg/LC_MESSAGES/gnome-initial-setup.mo +share/locale/bn_IN/LC_MESSAGES/gnome-initial-setup.mo +share/locale/bs/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ca/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ca@valencia/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ckb/LC_MESSAGES/gnome-initial-setup.mo +share/locale/cs/LC_MESSAGES/gnome-initial-setup.mo +share/locale/da/LC_MESSAGES/gnome-initial-setup.mo +share/locale/de/LC_MESSAGES/gnome-initial-setup.mo +share/locale/el/LC_MESSAGES/gnome-initial-setup.mo +share/locale/en_GB/LC_MESSAGES/gnome-initial-setup.mo +share/locale/eo/LC_MESSAGES/gnome-initial-setup.mo +share/locale/es/LC_MESSAGES/gnome-initial-setup.mo +share/locale/et/LC_MESSAGES/gnome-initial-setup.mo +share/locale/eu/LC_MESSAGES/gnome-initial-setup.mo +share/locale/fa/LC_MESSAGES/gnome-initial-setup.mo +share/locale/fi/LC_MESSAGES/gnome-initial-setup.mo +share/locale/fr/LC_MESSAGES/gnome-initial-setup.mo +share/locale/fur/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ga/LC_MESSAGES/gnome-initial-setup.mo +share/locale/gd/LC_MESSAGES/gnome-initial-setup.mo 
+share/locale/gl/LC_MESSAGES/gnome-initial-setup.mo +share/locale/gu/LC_MESSAGES/gnome-initial-setup.mo +share/locale/he/LC_MESSAGES/gnome-initial-setup.mo +share/locale/hi/LC_MESSAGES/gnome-initial-setup.mo +share/locale/hr/LC_MESSAGES/gnome-initial-setup.mo +share/locale/hu/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ia/LC_MESSAGES/gnome-initial-setup.mo +share/locale/id/LC_MESSAGES/gnome-initial-setup.mo +share/locale/is/LC_MESSAGES/gnome-initial-setup.mo +share/locale/it/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ja/LC_MESSAGES/gnome-initial-setup.mo +share/locale/kk/LC_MESSAGES/gnome-initial-setup.mo +share/locale/kn/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ko/LC_MESSAGES/gnome-initial-setup.mo +share/locale/lt/LC_MESSAGES/gnome-initial-setup.mo +share/locale/lv/LC_MESSAGES/gnome-initial-setup.mo +share/locale/mjw/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ml/LC_MESSAGES/gnome-initial-setup.mo +share/locale/mr/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ms/LC_MESSAGES/gnome-initial-setup.mo +share/locale/nb/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ne/LC_MESSAGES/gnome-initial-setup.mo +share/locale/nl/LC_MESSAGES/gnome-initial-setup.mo +share/locale/oc/LC_MESSAGES/gnome-initial-setup.mo +share/locale/or/LC_MESSAGES/gnome-initial-setup.mo +share/locale/pa/LC_MESSAGES/gnome-initial-setup.mo +share/locale/pl/LC_MESSAGES/gnome-initial-setup.mo +share/locale/pt/LC_MESSAGES/gnome-initial-setup.mo +share/locale/pt_BR/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ro/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ru/LC_MESSAGES/gnome-initial-setup.mo +share/locale/sk/LC_MESSAGES/gnome-initial-setup.mo +share/locale/sl/LC_MESSAGES/gnome-initial-setup.mo +share/locale/sr/LC_MESSAGES/gnome-initial-setup.mo +share/locale/sr@latin/LC_MESSAGES/gnome-initial-setup.mo +share/locale/sv/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ta/LC_MESSAGES/gnome-initial-setup.mo +share/locale/te/LC_MESSAGES/gnome-initial-setup.mo 
+share/locale/tg/LC_MESSAGES/gnome-initial-setup.mo +share/locale/th/LC_MESSAGES/gnome-initial-setup.mo +share/locale/tr/LC_MESSAGES/gnome-initial-setup.mo +share/locale/ug/LC_MESSAGES/gnome-initial-setup.mo +share/locale/uk/LC_MESSAGES/gnome-initial-setup.mo +share/locale/vi/LC_MESSAGES/gnome-initial-setup.mo +share/locale/zh_CN/LC_MESSAGES/gnome-initial-setup.mo +share/locale/zh_HK/LC_MESSAGES/gnome-initial-setup.mo +share/locale/zh_TW/LC_MESSAGES/gnome-initial-setup.mo share/polkit-1/rules.d/20-gnome-initial-setup.rules diff --git a/deskutils/gnome-maps/Makefile b/deskutils/gnome-maps/Makefile index 6dfb955e233..7e399047a69 100644 --- a/deskutils/gnome-maps/Makefile +++ b/deskutils/gnome-maps/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-maps DISTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/deskutils/gnome-photos/Makefile b/deskutils/gnome-photos/Makefile index 7ba560a8ea4..0b299b94511 100644 --- a/deskutils/gnome-photos/Makefile +++ b/deskutils/gnome-photos/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-photos DISTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/deskutils/gnome-todo/Makefile b/deskutils/gnome-todo/Makefile index c00bd6cc1ae..1362b4e3000 100644 --- a/deskutils/gnome-todo/Makefile +++ b/deskutils/gnome-todo/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-todo PORTVERSION= 41.0 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome3 diff --git a/deskutils/gnome-tweaks/Makefile b/deskutils/gnome-tweaks/Makefile index 75bd054c10b..9fe908e1385 100644 --- a/deskutils/gnome-tweaks/Makefile +++ b/deskutils/gnome-tweaks/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnome-tweaks PORTVERSION= 40.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= deskutils gnome 
MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} diff --git a/deskutils/gnote/Makefile b/deskutils/gnote/Makefile index 282fc00d210..6b122ca5727 100644 --- a/deskutils/gnote/Makefile +++ b/deskutils/gnote/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnote PORTVERSION= 41.2 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} diff --git a/deskutils/growl-for-linux/Makefile b/deskutils/growl-for-linux/Makefile index 74e81844546..ec44a4c0b34 100644 --- a/deskutils/growl-for-linux/Makefile +++ b/deskutils/growl-for-linux/Makefile @@ -1,6 +1,6 @@ PORTNAME= growl-for-linux PORTVERSION= 0.8.5 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= deskutils MAINTAINER= nivit@FreeBSD.org diff --git a/deskutils/gxneur/Makefile b/deskutils/gxneur/Makefile index 2243bbb9a37..0b1eb26880b 100644 --- a/deskutils/gxneur/Makefile +++ b/deskutils/gxneur/Makefile @@ -2,6 +2,7 @@ PORTNAME= gxneur PORTVERSION= 0.20.0 +PORTREVISION= 1 DISTVERSIONSUFFIX=.orig CATEGORIES= deskutils MASTER_SITES= https://launchpad.net/~andrew-crew-kuznetsov/+archive/xneur-stable/+files/ diff --git a/deskutils/lookbook/Makefile b/deskutils/lookbook/Makefile index 5f119bffad4..8572ea59f5d 100644 --- a/deskutils/lookbook/Makefile +++ b/deskutils/lookbook/Makefile @@ -1,6 +1,6 @@ PORTNAME= lookbook DISTVERSION= 1.2.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= deskutils MAINTAINER= greg@unrelenting.technology diff --git a/deskutils/notekit/Makefile b/deskutils/notekit/Makefile index 34a4218939f..9cd6e64b51d 100644 --- a/deskutils/notekit/Makefile +++ b/deskutils/notekit/Makefile @@ -2,6 +2,7 @@ PORTNAME= notekit PORTVERSION= g20211222 +PORTREVISION= 1 CATEGORIES= deskutils MAINTAINER= danfe@FreeBSD.org diff --git a/deskutils/osmo/Makefile b/deskutils/osmo/Makefile index a8d380b5ca2..c37c093d1a4 100644 --- a/deskutils/osmo/Makefile +++ b/deskutils/osmo/Makefile @@ -2,7 +2,7 @@ PORTNAME= osmo PORTVERSION= 0.4.4 -PORTREVISION= 2 
+PORTREVISION= 3 CATEGORIES= deskutils MASTER_SITES= SF/${PORTNAME}-pim/${PORTNAME}-pim/${PORTNAME}-${PORTVERSION} diff --git a/deskutils/pinot/Makefile b/deskutils/pinot/Makefile index 0c200e80cd0..787b2cdd1c0 100644 --- a/deskutils/pinot/Makefile +++ b/deskutils/pinot/Makefile @@ -2,6 +2,7 @@ PORTNAME= pinot PORTVERSION= 1.21 +PORTREVISION= 1 CATEGORIES= deskutils MAINTAINER= thierry@FreeBSD.org diff --git a/deskutils/py-autokey/Makefile b/deskutils/py-autokey/Makefile index 75feec28368..0106f06904c 100644 --- a/deskutils/py-autokey/Makefile +++ b/deskutils/py-autokey/Makefile @@ -1,7 +1,7 @@ PORTNAME= autokey DISTVERSIONPREFIX= v DISTVERSION= 0.95.10 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= deskutils python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/deskutils/py-py3status/Makefile b/deskutils/py-py3status/Makefile index fdae7dfbd2d..26d09d9e01b 100644 --- a/deskutils/py-py3status/Makefile +++ b/deskutils/py-py3status/Makefile @@ -1,7 +1,7 @@ # Created by: Martin Wilke PORTNAME= py3status -PORTVERSION= 3.40 +PORTVERSION= 3.41 CATEGORIES= deskutils python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/deskutils/py-py3status/distinfo b/deskutils/py-py3status/distinfo index 7096ef3ac25..cc01bd1c149 100644 --- a/deskutils/py-py3status/distinfo +++ b/deskutils/py-py3status/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637072938 -SHA256 (py3status-3.40.tar.gz) = 9eb6f721f94f28a17a8599ca2743a2bedd58c16cfe74e9817ffa948c13dbb79c -SIZE (py3status-3.40.tar.gz) = 390345 +TIMESTAMP = 1647264508 +SHA256 (py3status-3.41.tar.gz) = d86fb994a90bb6067fda0854d315530ea2176d337291820a462c1989a82eb287 +SIZE (py3status-3.41.tar.gz) = 389598 diff --git a/deskutils/py-pystash/files/patch-2to3 b/deskutils/py-pystash/files/patch-2to3 new file mode 100644 index 00000000000..5accd5cd1de --- /dev/null +++ b/deskutils/py-pystash/files/patch-2to3 @@ -0,0 +1,50 @@ +--- pystash/common.py.orig 2014-05-17 19:30:11 UTC ++++ pystash/common.py +@@ -79,12 +79,11 @@ 
class StashedItem(): + return '\n'.join(items) + + +-class AbstractStorage(object): ++class AbstractStorage(object, metaclass=abc.ABCMeta): + # todo: update methods signature + """ + Here will be a docstring + """ +- __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def get_connection(self, db): +@@ -140,7 +139,7 @@ class ShelveStorage(AbstractStorage): + self.DBFILE = db_file if db_file is not None else self.DBFILE + path_to_dir = os.path.join('/', *self.DBFILE.split('/')[1:-1]) + if not os.path.exists(path_to_dir): +- os.makedirs(path_to_dir, 0755) ++ os.makedirs(path_to_dir, 0o755) + self.connection = self.get_connection(self.DBFILE) + if not 'storage' in self.connection: + self.connection['storage'] = {} +@@ -224,13 +223,13 @@ class ShelveStorage(AbstractStorage): + + def get_all(self): + result = {} +- for k, v in self.db.iteritems(): ++ for k, v in self.db.items(): + result[k] = StashedItem(v) + return result + + def tags(self, tag): + result = {} +- for k, v in self.db.iteritems(): ++ for k, v in self.db.items(): + if 'tags' in v: + if tag in v['tags']: + result[k] = StashedItem(v) +@@ -238,7 +237,7 @@ class ShelveStorage(AbstractStorage): + + def alltags(self): + result = [] +- for k, v in self.db.iteritems(): ++ for k, v in self.db.items(): + if 'tags' in v: + for tag in v['tags']: + result.append(tag) diff --git a/deskutils/recoll/Makefile b/deskutils/recoll/Makefile index 46b67457f4c..f80f571451f 100644 --- a/deskutils/recoll/Makefile +++ b/deskutils/recoll/Makefile @@ -1,5 +1,6 @@ PORTNAME= recoll PORTVERSION= 1.31.4 +PORTREVISION= 1 CATEGORIES= deskutils MASTER_SITES= https://www.lesbonscomptes.com/recoll/ diff --git a/deskutils/rubrica/Makefile b/deskutils/rubrica/Makefile index 050fffa7648..70a8586331b 100644 --- a/deskutils/rubrica/Makefile +++ b/deskutils/rubrica/Makefile @@ -2,6 +2,7 @@ PORTNAME= rubrica PORTVERSION= 2.1.6 +PORTREVISION= 1 CATEGORIES= deskutils gnome MASTER_SITES= BERLIOS DISTNAME= ${PORTNAME}2-${PORTVERSION} diff --git 
a/deskutils/synapse/Makefile b/deskutils/synapse/Makefile index d74a84eb82d..84bc122420c 100644 --- a/deskutils/synapse/Makefile +++ b/deskutils/synapse/Makefile @@ -2,6 +2,7 @@ PORTNAME= synapse DISTVERSION= 0.2.99.4 +PORTREVISION= 1 CATEGORIES= deskutils MASTER_SITES= https://launchpadlibrarian.net/363823069/ diff --git a/deskutils/sysctlview/Makefile b/deskutils/sysctlview/Makefile index ac1fc95fa40..910d851eb14 100644 --- a/deskutils/sysctlview/Makefile +++ b/deskutils/sysctlview/Makefile @@ -1,5 +1,6 @@ PORTNAME= sysctlview PORTVERSION= 2.1 +PORTREVISION= 1 CATEGORIES= deskutils MAINTAINER= alfix86@gmail.com diff --git a/deskutils/taskwarrior/Makefile b/deskutils/taskwarrior/Makefile index 9127b4065ae..b0e4d5d0a75 100644 --- a/deskutils/taskwarrior/Makefile +++ b/deskutils/taskwarrior/Makefile @@ -1,7 +1,7 @@ # Created by: gahr PORTNAME= taskwarrior -PORTVERSION= 2.6.1 +PORTVERSION= 2.6.2 DISTVERSIONPREFIX= v CATEGORIES= deskutils diff --git a/deskutils/taskwarrior/distinfo b/deskutils/taskwarrior/distinfo index b436938593a..f65fdb4fb8c 100644 --- a/deskutils/taskwarrior/distinfo +++ b/deskutils/taskwarrior/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1634927357 -SHA256 (GothenburgBitFactory-taskwarrior-v2.6.1_GH0.tar.gz) = 5369a1fa706b8f4126d6dbe24b00b6e5f921296bc30cd088270544da9817d4ab -SIZE (GothenburgBitFactory-taskwarrior-v2.6.1_GH0.tar.gz) = 1343146 +TIMESTAMP = 1648256577 +SHA256 (GothenburgBitFactory-taskwarrior-v2.6.2_GH0.tar.gz) = 779ff60d104f189f9dbbaa40c95d2de45a53bb3bdeddeeb527eaaf3ae3ffbc5a +SIZE (GothenburgBitFactory-taskwarrior-v2.6.2_GH0.tar.gz) = 1345842 SHA256 (GothenburgBitFactory-libshared-8baf2db_GH0.tar.gz) = c93a30bf53a0751f10cc137c42bd05f1fa47477f2803ae45d684a14202238b63 SIZE (GothenburgBitFactory-libshared-8baf2db_GH0.tar.gz) = 161275 diff --git a/deskutils/virt-manager/Makefile b/deskutils/virt-manager/Makefile index b7c5bc64afc..299617ad182 100644 --- a/deskutils/virt-manager/Makefile +++ b/deskutils/virt-manager/Makefile @@ -1,5 +1,6 @@ 
PORTNAME= virt-manager PORTVERSION= 4.0.0 +PORTREVISION= 1 CATEGORIES= deskutils net-mgmt MASTER_SITES= https://virt-manager.org/download/sources/${PORTNAME}/ diff --git a/deskutils/xfce4-tumbler/Makefile b/deskutils/xfce4-tumbler/Makefile index ccbc47aeaee..98f2f1f6693 100644 --- a/deskutils/xfce4-tumbler/Makefile +++ b/deskutils/xfce4-tumbler/Makefile @@ -2,7 +2,7 @@ PORTNAME= tumbler PORTVERSION= 4.16.0 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= deskutils xfce MASTER_SITES= XFCE PKGNAMEPREFIX= xfce4- diff --git a/deskutils/xpad/Makefile b/deskutils/xpad/Makefile index bb51a25dc85..189e7268a6a 100644 --- a/deskutils/xpad/Makefile +++ b/deskutils/xpad/Makefile @@ -2,6 +2,7 @@ PORTNAME= xpad PORTVERSION= 5.4.0 +PORTREVISION= 1 CATEGORIES= deskutils MASTER_SITES= https://launchpad.net/${PORTNAME}/trunk/${PORTVERSION}/+download/ diff --git a/devel/Makefile b/devel/Makefile index 8b4793736c2..02d2ffa2806 100644 --- a/devel/Makefile +++ b/devel/Makefile @@ -4146,6 +4146,7 @@ SUBDIR += py-aiorpcX SUBDIR += py-aiorpcX-legacy SUBDIR += py-aiortc + SUBDIR += py-aiosignal SUBDIR += py-amalgamate SUBDIR += py-aniso8601 SUBDIR += py-aniso86016 @@ -4178,7 +4179,6 @@ SUBDIR += py-astunparse SUBDIR += py-async_generator SUBDIR += py-async_timeout - SUBDIR += py-asyncio SUBDIR += py-asynctest SUBDIR += py-atomiclong SUBDIR += py-atomicwrites @@ -4490,6 +4490,7 @@ SUBDIR += py-enum-compat SUBDIR += py-envisage SUBDIR += py-envs + SUBDIR += py-epc SUBDIR += py-epdb SUBDIR += py-epsilon SUBDIR += py-evdev @@ -4827,6 +4828,7 @@ SUBDIR += py-nose-timer SUBDIR += py-nose2 SUBDIR += py-nosexcover + SUBDIR += py-notebook-shim SUBDIR += py-notify2 SUBDIR += py-num2words SUBDIR += py-numba @@ -5103,6 +5105,7 @@ SUBDIR += py-pytest-rerunfailures SUBDIR += py-pytest-runner SUBDIR += py-pytest-shutil + SUBDIR += py-pytest-subtests SUBDIR += py-pytest-sugar SUBDIR += py-pytest-timeout SUBDIR += py-pytest-tornado @@ -5854,6 +5857,7 @@ SUBDIR += rubygem-aws-sdk-kafka SUBDIR += 
rubygem-aws-sdk-kafkaconnect SUBDIR += rubygem-aws-sdk-kendra + SUBDIR += rubygem-aws-sdk-keyspaces SUBDIR += rubygem-aws-sdk-kinesis SUBDIR += rubygem-aws-sdk-kinesisanalytics SUBDIR += rubygem-aws-sdk-kinesisanalyticsv2 @@ -6301,6 +6305,7 @@ SUBDIR += rubygem-equatable SUBDIR += rubygem-erb SUBDIR += rubygem-errand + SUBDIR += rubygem-error_highlight SUBDIR += rubygem-et-orbi SUBDIR += rubygem-etc SUBDIR += rubygem-event_emitter @@ -6661,6 +6666,7 @@ SUBDIR += rubygem-puppet-resource_api SUBDIR += rubygem-pygments.rb SUBDIR += rubygem-que + SUBDIR += rubygem-que-scheduler SUBDIR += rubygem-r18n-core SUBDIR += rubygem-r18n-desktop SUBDIR += rubygem-r18n-rails @@ -6786,7 +6792,11 @@ SUBDIR += rubygem-semi_semantic SUBDIR += rubygem-semver2 SUBDIR += rubygem-semverse + SUBDIR += rubygem-sentry-rails SUBDIR += rubygem-sentry-raven + SUBDIR += rubygem-sentry-ruby + SUBDIR += rubygem-sentry-ruby-core + SUBDIR += rubygem-sentry-sidekiq SUBDIR += rubygem-sequel SUBDIR += rubygem-set SUBDIR += rubygem-settingslogic @@ -6946,6 +6956,7 @@ SUBDIR += rubygem-validate_url SUBDIR += rubygem-validates_timeliness SUBDIR += rubygem-versionomy + SUBDIR += rubygem-view_component-rails61 SUBDIR += rubygem-virtus SUBDIR += rubygem-warbler SUBDIR += rubygem-warden diff --git a/devel/R-cran-Rdpack/Makefile b/devel/R-cran-Rdpack/Makefile index 12cf292f04f..714562c25ef 100644 --- a/devel/R-cran-Rdpack/Makefile +++ b/devel/R-cran-Rdpack/Makefile @@ -1,5 +1,5 @@ PORTNAME= Rdpack -DISTVERSION= 2.2 +DISTVERSION= 2.3 CATEGORIES= devel DISTNAME= ${PORTNAME}_${DISTVERSION} diff --git a/devel/R-cran-Rdpack/distinfo b/devel/R-cran-Rdpack/distinfo index e8ad46d47c4..f498e81a1c7 100644 --- a/devel/R-cran-Rdpack/distinfo +++ b/devel/R-cran-Rdpack/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647791727 -SHA256 (Rdpack_2.2.tar.gz) = b74d20a1637bcded931c40c1770fcdba0327b1204965feee57c080474d907dea -SIZE (Rdpack_2.2.tar.gz) = 805419 +TIMESTAMP = 1648269901 +SHA256 (Rdpack_2.3.tar.gz) = 
c45e1ab8352b92ce03f26ece1f4db3716959fca2af9e826d5bd3c76b2151f7c5 +SIZE (Rdpack_2.3.tar.gz) = 702728 diff --git a/devel/R-cran-fansi/Makefile b/devel/R-cran-fansi/Makefile index 093035a7ab0..795e663761e 100644 --- a/devel/R-cran-fansi/Makefile +++ b/devel/R-cran-fansi/Makefile @@ -1,5 +1,5 @@ PORTNAME= fansi -PORTVERSION= 1.0.2 +PORTVERSION= 1.0.3 CATEGORIES= devel DISTNAME= ${PORTNAME}_${PORTVERSION} diff --git a/devel/R-cran-fansi/distinfo b/devel/R-cran-fansi/distinfo index adeda91e833..75ae8a83708 100644 --- a/devel/R-cran-fansi/distinfo +++ b/devel/R-cran-fansi/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642316756 -SHA256 (fansi_1.0.2.tar.gz) = d1e2cf2e10613abe19071e3dab7c564ebcf85ad13cbee25fa1999267af01b557 -SIZE (fansi_1.0.2.tar.gz) = 480902 +TIMESTAMP = 1648345697 +SHA256 (fansi_1.0.3.tar.gz) = 86a7b83d8c9d28baebbde310cd0b459d0950a9c7ff1a6276ce5858f6a89bc06a +SIZE (fansi_1.0.3.tar.gz) = 481163 diff --git a/devel/R-cran-plyr/Makefile b/devel/R-cran-plyr/Makefile index 1d3420789ab..c6b3e764aa1 100644 --- a/devel/R-cran-plyr/Makefile +++ b/devel/R-cran-plyr/Makefile @@ -1,19 +1,14 @@ # Created by: TAKATSU Tomonari PORTNAME= plyr -PORTVERSION= 1.8.6 -PORTREVISION= 1 +PORTVERSION= 1.8.7 CATEGORIES= devel DISTNAME= ${PORTNAME}_${DISTVERSION} MAINTAINER= tota@FreeBSD.org COMMENT= Tools for splitting, applying, and combining data -LICENSE= MIT HW -LICENSE_COMB= multi -LICENSE_NAME_HW= Hadley Wickham COPYRIGHT -LICENSE_FILE_HW= ${WRKSRC}/LICENSE -LICENSE_PERMS_HW= dist-mirror dist-sell pkg-mirror pkg-sell auto-accept +LICENSE= MIT CRAN_DEPENDS= R-cran-Rcpp>=0.11.0:devel/R-cran-Rcpp BUILD_DEPENDS= ${CRAN_DEPENDS} diff --git a/devel/R-cran-plyr/distinfo b/devel/R-cran-plyr/distinfo index 4980b17d7c2..eb4499c9374 100644 --- a/devel/R-cran-plyr/distinfo +++ b/devel/R-cran-plyr/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1585866142 -SHA256 (plyr_1.8.6.tar.gz) = ea55d26f155443e9774769531daa5d4c20a0697bb53abd832e891b126c935287 -SIZE (plyr_1.8.6.tar.gz) = 401191 +TIMESTAMP = 1648360136 
+SHA256 (plyr_1.8.7.tar.gz) = 7d9fdaf1157035a49c3661da3bbaa7bfcf782aafe1b98f7b5a68b0520046e87f +SIZE (plyr_1.8.7.tar.gz) = 401481 diff --git a/devel/aegis/Makefile b/devel/aegis/Makefile index 1722b91fd9c..07ee8e55fcc 100644 --- a/devel/aegis/Makefile +++ b/devel/aegis/Makefile @@ -3,7 +3,7 @@ PORTNAME= aegis PORTVERSION= 4.25 DISTVERSIONSUFFIX= .D510 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= devel MASTER_SITES= SF diff --git a/devel/anjuta/Makefile b/devel/anjuta/Makefile index e790e5b7c8e..bdbdb8cf569 100644 --- a/devel/anjuta/Makefile +++ b/devel/anjuta/Makefile @@ -2,7 +2,7 @@ PORTNAME= anjuta PORTVERSION= 3.34.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/devel/appstream/Makefile b/devel/appstream/Makefile index 41a6911719f..4732e68e57b 100644 --- a/devel/appstream/Makefile +++ b/devel/appstream/Makefile @@ -1,5 +1,6 @@ PORTNAME= AppStream DISTVERSION= 0.15.2 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= https://www.freedesktop.org/software/${PORTNAME:tl}/releases/ diff --git a/devel/autogen/Makefile b/devel/autogen/Makefile index b599a5de6ff..6e7824c8eb9 100644 --- a/devel/autogen/Makefile +++ b/devel/autogen/Makefile @@ -2,7 +2,7 @@ PORTNAME= autogen PORTVERSION= 5.18.16 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel MASTER_SITES= GNU/${PORTNAME}/rel${PORTVERSION} diff --git a/devel/caf/Makefile b/devel/caf/Makefile index dd2184c56ff..e2dd757a789 100644 --- a/devel/caf/Makefile +++ b/devel/caf/Makefile @@ -1,7 +1,7 @@ # Created by: vanilla@ PORTNAME= caf -PORTVERSION= 0.18.5 +PORTVERSION= 0.18.6 CATEGORIES= devel MAINTAINER= ports@FreeBSD.org diff --git a/devel/caf/distinfo b/devel/caf/distinfo index 786936c3384..05730652e11 100644 --- a/devel/caf/distinfo +++ b/devel/caf/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1628143309 -SHA256 (actor-framework-actor-framework-0.18.5_GH0.tar.gz) = 4c96f896f000218bb65890b4d7175451834add73750d5f33b0c7fe82b7d5a679 -SIZE 
(actor-framework-actor-framework-0.18.5_GH0.tar.gz) = 2827181 +TIMESTAMP = 1648323549 +SHA256 (actor-framework-actor-framework-0.18.6_GH0.tar.gz) = c2ead63a0322d992fea8813a7f7d15b4d16cbb8bbe026722f2616a79109b91cc +SIZE (actor-framework-actor-framework-0.18.6_GH0.tar.gz) = 2824066 diff --git a/devel/caf/pkg-plist b/devel/caf/pkg-plist index 13294a56dc6..ef3fb7f728f 100644 --- a/devel/caf/pkg-plist +++ b/devel/caf/pkg-plist @@ -1,6 +1,7 @@ include/caf/abstract_actor.hpp include/caf/abstract_channel.hpp include/caf/abstract_group.hpp +include/caf/action.hpp include/caf/actor.hpp include/caf/actor_addr.hpp include/caf/actor_cast.hpp @@ -88,8 +89,8 @@ include/caf/detail/consumer.hpp include/caf/detail/core_export.hpp include/caf/detail/default_invoke_result_visitor.hpp include/caf/detail/delegate_serialize.hpp +include/caf/detail/dispose_on_call.hpp include/caf/detail/double_ended_queue.hpp -include/caf/detail/encode_base64.hpp include/caf/detail/enqueue_result.hpp include/caf/detail/functor_attachable.hpp include/caf/detail/gcd.hpp @@ -167,7 +168,6 @@ include/caf/detail/select_integer_type.hpp include/caf/detail/serialized_size.hpp include/caf/detail/set_thread_name.hpp include/caf/detail/shared_spinlock.hpp -include/caf/detail/simple_actor_clock.hpp include/caf/detail/size_based_credit_controller.hpp include/caf/detail/socket_guard.hpp include/caf/detail/spawn_fwd.hpp @@ -200,6 +200,7 @@ include/caf/detail/unordered_flat_map.hpp include/caf/detail/variant_data.hpp include/caf/detail/worker_hub.hpp include/caf/dictionary.hpp +include/caf/disposable.hpp include/caf/downstream.hpp include/caf/downstream_manager.hpp include/caf/downstream_manager_base.hpp @@ -342,20 +343,11 @@ include/caf/make_sink_result.hpp include/caf/make_source_result.hpp include/caf/make_stage_result.hpp include/caf/may_have_timeout.hpp -include/caf/memory_managed.hpp include/caf/message.hpp include/caf/message_builder.hpp include/caf/message_handler.hpp include/caf/message_id.hpp 
include/caf/message_priority.hpp -include/caf/meta/annotation.hpp -include/caf/meta/hex_formatted.hpp -include/caf/meta/load_callback.hpp -include/caf/meta/omittable.hpp -include/caf/meta/omittable_if_empty.hpp -include/caf/meta/omittable_if_none.hpp -include/caf/meta/save_callback.hpp -include/caf/meta/type_name.hpp include/caf/mixin/actor_widget.hpp include/caf/mixin/behavior_changer.hpp include/caf/mixin/requester.hpp @@ -506,6 +498,7 @@ lib/libcaf_openssl.so.%%PORTVERSION%% %%DATADIR%%/examples/aout.cpp %%DATADIR%%/examples/broker/simple_broker.cpp %%DATADIR%%/examples/broker/simple_http_broker.cpp +%%DATADIR%%/examples/config/read-json.cpp %%DATADIR%%/examples/custom_type/custom_types_1.cpp %%DATADIR%%/examples/custom_type/custom_types_2.cpp %%DATADIR%%/examples/custom_type/custom_types_3.cpp diff --git a/devel/codeville/Makefile b/devel/codeville/Makefile index d6c764a19c0..c2e6ee70ba5 100644 --- a/devel/codeville/Makefile +++ b/devel/codeville/Makefile @@ -15,7 +15,7 @@ BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}future>0:devel/py-future@${PY_FLAVOR} RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}future>0:devel/py-future@${PY_FLAVOR} USES= python:3.7+ tar:txz -USE_PYTHON= distutils autoplist +USE_PYTHON= distutils autoplist noflavors NO_ARCH= yes PORTEXAMPLES= cdvserver.conf.sample diff --git a/devel/codeville/files/patch-indent b/devel/codeville/files/patch-indent new file mode 100644 index 00000000000..78679d8335c --- /dev/null +++ b/devel/codeville/files/patch-indent @@ -0,0 +1,75 @@ +--- Codeville/SRP.py.orig 2020-12-15 07:22:56 UTC ++++ Codeville/SRP.py +@@ -34,7 +34,7 @@ class ImproperKeyValue(Exception): pass + def hash(s): + """Hash a value with some hashing algorithm.""" + if type(s) != type(''): +- s = long_to_string(s) ++ s = long_to_string(s) + + return sha.new(s).digest() + +@@ -85,7 +85,7 @@ def client_key(user, passphrase, s, B, u, keys, key_fu + # We don't trust the host. Perhaps the host is being spoofed. 
+ + if B <= 0 or n <= B: +- raise ImproperKeyValue ++ raise ImproperKeyValue + + # Calculate the shared, secret session key. + +@@ -93,7 +93,7 @@ def client_key(user, passphrase, s, B, u, keys, key_fu + v = 3 * pow(g, x, n) + t = B + if t < v: +- t = t + n ++ t = t + n + S = pow(t - v, a + u * x, n) + K = hash(S) + +@@ -118,21 +118,21 @@ def host_begin(user, A, s, v): + # order to break the protocol. + + if A <= 0 or n <= A: +- raise ImproperKeyValue ++ raise ImproperKeyValue + + # Pick our random public keys. + + B = 0 + while B == 0: +- b = random_long(ablen) +- B = ((3*v) + pow(g, b, n)) % n ++ b = random_long(ablen) ++ B = ((3*v) + pow(g, b, n)) % n + u = pow(g, random_long(tlen), n) + + # Calculate the (private, shared secret) session key. + + t = (A * pow(v, u, n)) % n + if t <= 1 or t + 1 == n: +- raise ImproperKeyValue # WeakKeyValue -- could be our fault so retry ++ raise ImproperKeyValue # WeakKeyValue -- could be our fault so retry + S = pow(t, b, n) + K = hash(S) + +--- Codeville/entropy.py.orig 2020-12-15 07:23:19 UTC ++++ Codeville/entropy.py +@@ -31,14 +31,14 @@ def string_to_long(s): + """Convert a string of bytes into a long integer.""" + r = 0 + for c in s: +- r = (r << 8) + ord(c) ++ r = (r << 8) + ord(c) + return r + + def long_to_string(i, length=0): + """Convert a long integer into a string of bytes.""" + s = '' + while i > 0: +- s = chr(i & 255) + s +- i = i >> 8 ++ s = chr(i & 255) + s ++ i = i >> 8 + s = '\x00' * (length - len(s)) + s + return s diff --git a/devel/collada-dom/Makefile b/devel/collada-dom/Makefile index 82d1b31bcae..fafd0b140e9 100644 --- a/devel/collada-dom/Makefile +++ b/devel/collada-dom/Makefile @@ -1,7 +1,7 @@ PORTNAME= collada-dom DISTVERSIONPREFIX= v DISTVERSION= 2.5.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel MAINTAINER= trueos@norwegianrockcat.com diff --git a/devel/compiz-bcop/Makefile b/devel/compiz-bcop/Makefile index 0565136f220..113e5f4d015 100644 --- a/devel/compiz-bcop/Makefile +++ 
b/devel/compiz-bcop/Makefile @@ -2,6 +2,7 @@ PORTNAME= compiz-bcop PORTVERSION= 0.8.8 +PORTREVISION= 1 CATEGORIES= devel x11-wm MASTER_SITES= http://releases.compiz.org/${PORTVERSION}/ \ https://BSDforge.com/projects/source/devel/compiz-bcop/ diff --git a/devel/csoap/Makefile b/devel/csoap/Makefile index 30a7a83a43e..32acea09707 100644 --- a/devel/csoap/Makefile +++ b/devel/csoap/Makefile @@ -2,7 +2,7 @@ PORTNAME= csoap PORTVERSION= 1.1.0 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= devel net www MASTER_SITES= SF/${PORTNAME}/libsoap/${PORTVERSION} DISTNAME= libsoap-${PORTVERSION} diff --git a/devel/dconf/Makefile b/devel/dconf/Makefile index 99717169501..23d70abe738 100644 --- a/devel/dconf/Makefile +++ b/devel/dconf/Makefile @@ -2,7 +2,7 @@ PORTNAME= dconf PORTVERSION= 0.40.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel gnome MASTER_SITES= GNOME DISTNAME= dconf-${PORTVERSION} diff --git a/devel/dia2code+/Makefile b/devel/dia2code+/Makefile index 824f0372165..248a85457e7 100644 --- a/devel/dia2code+/Makefile +++ b/devel/dia2code+/Makefile @@ -2,7 +2,7 @@ PORTNAME= dia2code+ PORTVERSION= 1.0.0 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= devel MASTER_SITES= SF/dia2code/dia2code+/${PORTVERSION} DISTNAME= dia2code-${PORTVERSION} diff --git a/devel/dia2code/Makefile b/devel/dia2code/Makefile index b1486ed60c5..9b7dd246a11 100644 --- a/devel/dia2code/Makefile +++ b/devel/dia2code/Makefile @@ -2,6 +2,7 @@ PORTNAME= dia2code PORTVERSION= 0.8.8 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= SF diff --git a/devel/efl/Makefile b/devel/efl/Makefile index b72262b8ca6..75cb663dfc9 100644 --- a/devel/efl/Makefile +++ b/devel/efl/Makefile @@ -132,8 +132,12 @@ IBUS_LIB_DEPENDS= libibus-1.0.so:textproc/ibus IBUS_USES= gnome IBUS_USE= GNOME=glib20 IBUS_MESON_TRUE= glib +IBUS_VARS_OFF= ibus_disabler=ibus SCIM_LIB_DEPENDS= libscim-1.0.so:textproc/scim +SCIM_VARS_OFF= scim_disabler=scim + +XIM_VARS_OFF= xim_disabler=xim OPENJPEG_LIB_DEPENDS= libopenjp2.so:graphics/openjpeg 
@@ -187,32 +191,9 @@ SDL_MESON_TRUE= sdl .include -ECORE-IMF-LOADERS-DISABLER-BASE= -Decore-imf-loaders-disabler= -ECORE-IMF-LOADERS-DISABLER:= ${ECORE-IMF-LOADERS-DISABLER-BASE} +IMF_DISABLERS= ${IBUS_DISABLER} ${SCIM_DISABLER} ${XIM_DISABLER} -.if empty(PORT_OPTIONS:MIBUS) -ECORE-IMF-LOADERS-DISABLER:= "${ECORE-IMF-LOADERS-DISABLER-BASE}ibus" -.endif - -.if empty(PORT_OPTIONS:MSCIM) -.if ${ECORE-IMF-LOADERS-DISABLER} != ${ECORE-IMF-LOADERS-DISABLER-BASE} -ECORE-IMF-LOADERS-DISABLER-BASE:=${ECORE-IMF-LOADERS-DISABLER} -ECORE-IMF-LOADERS-DISABLER:= "${ECORE-IMF-LOADERS-DISABLER-BASE},scim" -.else -ECORE-IMF-LOADERS-DISABLER:= "${ECORE-IMF-LOADERS-DISABLER-BASE}scim" -.endif -.endif - -.if empty(PORT_OPTIONS:MXIM) -.if ${ECORE-IMF-LOADERS-DISABLER} != ${ECORE-IMF-LOADERS-DISABLER-BASE} -ECORE-IMF-LOADERS-DISABLER-BASE:=${ECORE-IMF-LOADERS-DISABLER} -ECORE-IMF-LOADERS-DISABLER:= "${ECORE-IMF-LOADERS-DISABLER-BASE},xim" -.else -ECORE-IMF-LOADERS-DISABLER:= "${ECORE-IMF-LOADERS-DISABLER-BASE}xim" -.endif -.endif - -MESON_ARGS+= ${ECORE-IMF-LOADERS-DISABLER} +MESON_ARGS+= -Decore-imf-loaders-disabler="${IMF_DISABLERS:ts,}" EVAS-LOADERS-DISABLER-BASE= -Devas-loaders-disabler= EVAS-LOADERS-DISABLER:= ${EVAS-LOADERS-DISABLER-BASE} diff --git a/devel/electron13/Makefile b/devel/electron13/Makefile index 23273c9214e..130c91bb0a1 100644 --- a/devel/electron13/Makefile +++ b/devel/electron13/Makefile @@ -1,6 +1,7 @@ PORTNAME= electron DISTVERSIONPREFIX= v DISTVERSION= ${ELECTRON_VER:S/-beta./.b/} +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= https://commondatastorage.googleapis.com/chromium-browser-official/:chromium \ https://commondatastorage.googleapis.com/chromium-fonts/:chromium_testfonts diff --git a/devel/flang-clang/Makefile b/devel/flang-clang/Makefile index caceffcb398..0a6a86565cf 100644 --- a/devel/flang-clang/Makefile +++ b/devel/flang-clang/Makefile @@ -2,6 +2,7 @@ PORTNAME= flang-clang DISTVERSION= 7.0-g20191020 +PORTREVISION= 1 CATEGORIES= devel lang 
MAINTAINER= jmd@FreeBSD.org diff --git a/devel/frama-c/Makefile b/devel/frama-c/Makefile index 80cedbdabab..f8ee8f37246 100644 --- a/devel/frama-c/Makefile +++ b/devel/frama-c/Makefile @@ -3,6 +3,7 @@ PORTNAME= frama-c DISTVERSIONPREFIX= Phosphorus- DISTVERSION= 20170501 +PORTREVISION= 1 CATEGORIES= devel lang MASTER_SITES= http://frama-c.com/download/ diff --git a/devel/gconf2/Makefile b/devel/gconf2/Makefile index 9e535cc824a..3bd42bee1ba 100644 --- a/devel/gconf2/Makefile +++ b/devel/gconf2/Makefile @@ -3,7 +3,7 @@ PORTNAME= gconf2 PORTVERSION= 3.2.6 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= devel gnome MASTER_SITES= GNOME/sources/GConf/${PORTVERSION:C/^([0-9]+\.[0-9]+).*/\1/} DISTNAME= GConf-${PORTVERSION} diff --git a/devel/gconfmm26/Makefile b/devel/gconfmm26/Makefile index 3921941d4f3..c2db441261d 100644 --- a/devel/gconfmm26/Makefile +++ b/devel/gconfmm26/Makefile @@ -3,7 +3,7 @@ PORTNAME= gconfmm PORTVERSION= 2.28.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/devel/geany-plugin-pretty-printer/Makefile b/devel/geany-plugin-pretty-printer/Makefile index ac42c8e7947..09940a49acc 100644 --- a/devel/geany-plugin-pretty-printer/Makefile +++ b/devel/geany-plugin-pretty-printer/Makefile @@ -1,5 +1,6 @@ PORTNAME= geany-plugin-pretty-printer PORTVERSION= ${GEANY_VER} +PORTREVISION= 1 CATEGORIES= devel MAINTAINER= madpilot@FreeBSD.org diff --git a/devel/ghub/Makefile b/devel/ghub/Makefile index f64fdc7d048..979eabce855 100644 --- a/devel/ghub/Makefile +++ b/devel/ghub/Makefile @@ -1,7 +1,6 @@ PORTNAME= ghub DISTVERSIONPREFIX= v -DISTVERSION= 3.5.5 -PORTREVISION= 1 +DISTVERSION= 3.5.6 CATEGORIES= devel elisp PKGNAMESUFFIX= ${EMACS_PKGNAMESUFFIX} diff --git a/devel/ghub/distinfo b/devel/ghub/distinfo index a3793e26fa5..de6110538ab 100644 --- a/devel/ghub/distinfo +++ b/devel/ghub/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645075735 -SHA256 (magit-ghub-v3.5.5_GH0.tar.gz) = 
8d167514ba15e3f6e3bf7c7ad7c4d024cde76134ceb7589e474d3ef0bf334aa6 -SIZE (magit-ghub-v3.5.5_GH0.tar.gz) = 59247 +TIMESTAMP = 1648212121 +SHA256 (magit-ghub-v3.5.6_GH0.tar.gz) = 0d5bd0b83f19459cf427d562a46ecfc9e1e8b52c584c96e34f5f39197fcda9cf +SIZE (magit-ghub-v3.5.6_GH0.tar.gz) = 59474 diff --git a/devel/gitaly/Makefile b/devel/gitaly/Makefile index ac656e9e8d7..ac8d14af7ee 100644 --- a/devel/gitaly/Makefile +++ b/devel/gitaly/Makefile @@ -1,5 +1,5 @@ PORTNAME= gitaly -DISTVERSION= 14.8.4 +DISTVERSION= 14.9.1 PORTREVISION= 0 CATEGORIES= devel @@ -14,7 +14,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE # it fixes segfaults reported here: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=259848 # define dependencies that are required for build and run under MY_DEPENDS -MY_DEPENDS= git>=2.33.0:devel/git \ +MY_DEPENDS= git>=2.35.1:devel/git \ rubygem-bundler>=2.1.4:sysutils/rubygem-bundler \ rubygem-rugged>=1.2<2:devel/rubygem-rugged \ rubygem-github-linguist>=7.12.1<8:textproc/rubygem-github-linguist \ @@ -49,7 +49,7 @@ MAKE_ENV+= ${GO_ENV} USE_GITLAB= yes GL_ACCOUNT= gitlab-org # Find this here: https://gitlab.com/gitlab-org/gitaly/-/tags -GL_COMMIT= d35ed2db0cdea9f7494a7f6758c06bacf0160c53 +GL_COMMIT= 402944bf57a1edd384e5fed24473f83280979a80 # for go dependencies USE_GITHUB= nodefault @@ -155,7 +155,7 @@ GH_TUPLE= \ jstemmer:go-junit-report:v0.9.1:jstemmer_go_junit_report/vendor/github.com/jstemmer/go-junit-report \ kelseyhightower:envconfig:v1.3.0:kelseyhightower_envconfig/vendor/github.com/kelseyhightower/envconfig \ kevinburke:ssh_config:4977a11b4351:kevinburke_ssh_config/vendor/github.com/kevinburke/ssh_config \ - libgit2:git2go:v33.0.6:libgit2_git2go_v33/vendor/github.com/libgit2/git2go/v33 \ + libgit2:git2go:v33.0.9:libgit2_git2go_v33/vendor/github.com/libgit2/git2go/v33 \ lightstep:lightstep-tracer-common:a8dfcb80d3a7:lightstep_lightstep_tracer_common_gogo/vendor/github.com/lightstep/lightstep-tracer-common \ 
lightstep:lightstep-tracer-go:v0.24.0:lightstep_lightstep_tracer_go/vendor/github.com/lightstep/lightstep-tracer-go \ mattn:go-ieproxy:v0.0.1:mattn_go_ieproxy/vendor/github.com/mattn/go-ieproxy \ @@ -201,6 +201,7 @@ GH_TUPLE= \ GL_TUPLE= gitlab-org:gitlab-shell:50da611814d256c77e689977265ec7e07633a4dc:gitlab_org_gitlab_shell/vendor/gitlab.com/gitlab-org/gitlab-shell \ gitlab-org:labkit:397363e2404ac0276b1959373a1c1c2bc1610d67:gitlab_org_labkit/vendor/gitlab.com/gitlab-org/labkit + # 13dd post-extract: @${RM} -r ${WRKSRC}/vendor/cloud.google.com/go/storage diff --git a/devel/gitaly/distinfo b/devel/gitaly/distinfo index 07e85a10fe6..d0e4d3bd7a6 100644 --- a/devel/gitaly/distinfo +++ b/devel/gitaly/distinfo @@ -1,4 +1,4 @@ -TIMESTAMP = 1647601503 +TIMESTAMP = 1648219174 SHA256 (Azure-azure-pipeline-go-v0.2.3_GH0.tar.gz) = 99bd58f4a07dd02d9615e3638b3bb6dbfad80ef678ccdb8e17e3fa2b0fef343e SIZE (Azure-azure-pipeline-go-v0.2.3_GH0.tar.gz) = 17102 SHA256 (Azure-azure-storage-blob-go-v0.13.0_GH0.tar.gz) = 6bf7145210331efa3f0417f6684cf764c22743cf23122048ec136600daebf443 @@ -197,8 +197,8 @@ SHA256 (kelseyhightower-envconfig-v1.3.0_GH0.tar.gz) = 3556a0d014ba778b78955e8ec SIZE (kelseyhightower-envconfig-v1.3.0_GH0.tar.gz) = 12408 SHA256 (kevinburke-ssh_config-4977a11b4351_GH0.tar.gz) = 568ea6be8d237ca16dcd03e2fa5e07461ab596ccff40197ac34dac43f97389ec SIZE (kevinburke-ssh_config-4977a11b4351_GH0.tar.gz) = 17407 -SHA256 (libgit2-git2go-v33.0.6_GH0.tar.gz) = 14b0e4604f97b40e91d5d54ac01f4e2ea56014cf647d16063d692fbe8269a33f -SIZE (libgit2-git2go-v33.0.6_GH0.tar.gz) = 129055 +SHA256 (libgit2-git2go-v33.0.9_GH0.tar.gz) = bcdaa5ed86d7ad513f51cdd80006a23a7fa9d9e68db06b3ce39a25a4196e4d67 +SIZE (libgit2-git2go-v33.0.9_GH0.tar.gz) = 130832 SHA256 (lightstep-lightstep-tracer-common-a8dfcb80d3a7_GH0.tar.gz) = 0403290c66b3dd109d0a726784b8b0a8583c5d19d35d12a0c2b89a434ed2d0c6 SIZE (lightstep-lightstep-tracer-common-a8dfcb80d3a7_GH0.tar.gz) = 58493 SHA256 
(lightstep-lightstep-tracer-go-v0.24.0_GH0.tar.gz) = 794c5cea97c46ee5a9139c1b2770cbd81d4534ba4359a8b6b744566c4abfb15b @@ -281,8 +281,8 @@ SHA256 (xanzy-ssh-agent-v0.3.0_GH0.tar.gz) = 7ce80a93d0fdbeb6760f97d6d166d11c215 SIZE (xanzy-ssh-agent-v0.3.0_GH0.tar.gz) = 8421 SHA256 (gonum-gonum-v0.8.2_GH0.tar.gz) = a2aad1ac038d36d568939910f39eb0c58cae3c6f0b18df3ca74a8daa954d1663 SIZE (gonum-gonum-v0.8.2_GH0.tar.gz) = 3226037 -SHA256 (gitlab-org-gitaly-d35ed2db0cdea9f7494a7f6758c06bacf0160c53_GL0.tar.gz) = 225b707bf7fe39973c4e1a6c568caf909a7c5cf5cb92b68a0be8eb40500dad0f -SIZE (gitlab-org-gitaly-d35ed2db0cdea9f7494a7f6758c06bacf0160c53_GL0.tar.gz) = 3804195 +SHA256 (gitlab-org-gitaly-402944bf57a1edd384e5fed24473f83280979a80_GL0.tar.gz) = 5f5e984242457edab9922df475c2f9b8bbb2e7f5d120789e3baa5de81fc33c9a +SIZE (gitlab-org-gitaly-402944bf57a1edd384e5fed24473f83280979a80_GL0.tar.gz) = 3850451 SHA256 (gitlab-org-gitlab-shell-50da611814d256c77e689977265ec7e07633a4dc_GL0.tar.gz) = a00e10d3dbe50e7c70b75c5fcf7d42a039a24c13b0b751a0339bb18261ac50af SIZE (gitlab-org-gitlab-shell-50da611814d256c77e689977265ec7e07633a4dc_GL0.tar.gz) = 124516 SHA256 (gitlab-org-labkit-397363e2404ac0276b1959373a1c1c2bc1610d67_GL0.tar.gz) = e48dae8ea183f946189a9ac7e4cbe4bc0fa583e6baafb0074a3463a879565ecb diff --git a/devel/gitaly/files/patch-Makefile b/devel/gitaly/files/patch-Makefile index e97345affa2..61a17c80685 100644 --- a/devel/gitaly/files/patch-Makefile +++ b/devel/gitaly/files/patch-Makefile @@ -1,6 +1,6 @@ ---- Makefile.orig 2022-02-25 19:11:57 UTC +--- Makefile.orig 2022-03-21 09:11:43 UTC +++ Makefile -@@ -317,7 +317,7 @@ help: +@@ -314,7 +314,7 @@ help: .PHONY: build ## Build Go binaries and install required Ruby Gems. @@ -9,7 +9,7 @@ @ # We used to install Gitaly binaries into the source directory by default when executing @ # "make" or "make all", which has been changed in v14.5 to only build binaries into @ # `_build/bin`. 
In order to quickly fail in case any source install still refers to these -@@ -348,7 +348,7 @@ $(call find_commands): +@@ -345,7 +345,7 @@ $(call find_commands): @ # This fallback is unique but non-deterministic, making it sufficient to avoid generating the @ # GNU build-id from the empty string and causing guaranteed collisions. GO_BUILD_ID=$$( go tool buildid $(addprefix ${BUILD_DIR}/bin/, $@) || openssl rand -hex 32 ) && \ @@ -18,7 +18,7 @@ go install -ldflags '${GO_LDFLAGS}'" -B 0x$$GNU_BUILD_ID" -tags "${GO_BUILD_TAGS}" $(addprefix ${GITALY_PACKAGE}/cmd/, $@) endif -@@ -558,7 +558,7 @@ libgit2: ${LIBGIT2_INSTALL_DIR}/lib/libgit2.a +@@ -559,7 +559,7 @@ libgit2: ${LIBGIT2_INSTALL_DIR}/lib/libgit2.a # step. Both Omnibus and CNG assume it is in the Gitaly root, not in # _build. Hence the '../' in front. ${SOURCE_DIR}/.ruby-bundle: ${GITALY_RUBY_DIR}/Gemfile.lock ${GITALY_RUBY_DIR}/Gemfile diff --git a/devel/gitg/Makefile b/devel/gitg/Makefile index 2dcd8cda955..8a44a200483 100644 --- a/devel/gitg/Makefile +++ b/devel/gitg/Makefile @@ -2,7 +2,7 @@ PORTNAME= gitg PORTVERSION= 3.32.1 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= devel deskutils gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/devel/gnome-builder/Makefile b/devel/gnome-builder/Makefile index bd9ccd58ce2..1bac1ce6f82 100644 --- a/devel/gnome-builder/Makefile +++ b/devel/gnome-builder/Makefile @@ -1,5 +1,6 @@ PORTNAME= gnome-builder PORTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= devel gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/devel/goffice/Makefile b/devel/goffice/Makefile index 4adcd0fdd7c..60f0275ddd7 100644 --- a/devel/goffice/Makefile +++ b/devel/goffice/Makefile @@ -2,6 +2,7 @@ PORTNAME= goffice PORTVERSION= 0.10.50 +PORTREVISION= 1 CATEGORIES= devel gnome MASTER_SITES= GNOME diff --git a/devel/gstreamer1-plugins-soup/Makefile b/devel/gstreamer1-plugins-soup/Makefile index 46509fd37fe..57c79bb3767 100644 --- 
a/devel/gstreamer1-plugins-soup/Makefile +++ b/devel/gstreamer1-plugins-soup/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= devel COMMENT= GStreamer soup based http input plugin diff --git a/devel/gtranslator/Makefile b/devel/gtranslator/Makefile index 21519432020..9f392657dbb 100644 --- a/devel/gtranslator/Makefile +++ b/devel/gtranslator/Makefile @@ -2,6 +2,7 @@ PORTNAME= gtranslator PORTVERSION= 40.0 +PORTREVISION= 1 CATEGORIES= devel gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} diff --git a/devel/gvfs/Makefile b/devel/gvfs/Makefile index 11e9ec9e969..0270e090f06 100644 --- a/devel/gvfs/Makefile +++ b/devel/gvfs/Makefile @@ -2,6 +2,7 @@ PORTNAME= gvfs PORTVERSION= 1.46.2 +PORTREVISION= 1 CATEGORIES= devel gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/devel/hwloc/Makefile b/devel/hwloc/Makefile index e58d77b2605..e619ee87f7d 100644 --- a/devel/hwloc/Makefile +++ b/devel/hwloc/Makefile @@ -1,5 +1,6 @@ PORTNAME= hwloc PORTVERSION= 1.11.13 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= http://www.open-mpi.org/software/hwloc/v${PORTVERSION:R}/downloads/ \ http://icl.cs.utk.edu/open-mpi/software/hwloc/v${PORTVERSION:R}/downloads/ diff --git a/devel/hwloc2/Makefile b/devel/hwloc2/Makefile index e254ec29e9c..019f893032b 100644 --- a/devel/hwloc2/Makefile +++ b/devel/hwloc2/Makefile @@ -1,5 +1,6 @@ PORTNAME= hwloc DISTVERSION= 2.7.0 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= https://download.open-mpi.org/release/hwloc/v${PORTVERSION:R}/ PKGNAMESUFFIX= 2 diff --git a/devel/inih/Makefile b/devel/inih/Makefile index 9c1ead2c970..975185e79d3 100644 --- a/devel/inih/Makefile +++ b/devel/inih/Makefile @@ -1,6 +1,6 @@ PORTNAME= inih DISTVERSIONPREFIX= r -DISTVERSION= 53 +DISTVERSION= 55 CATEGORIES= devel MAINTAINER= yuri@FreeBSD.org @@ -20,6 +20,7 @@ GH_ACCOUNT= benhoyt do-test: @cd ${WRKSRC}/tests && \ ${REINPLACE_CMD} -e 's|gcc|${CC}|' unittest.sh && \ - ./unittest.sh + ./unittest.sh && \ + 
${ECHO} "Tests succeeded" .include diff --git a/devel/inih/distinfo b/devel/inih/distinfo index 5f697ff8dbb..186b1b1049f 100644 --- a/devel/inih/distinfo +++ b/devel/inih/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1615838417 -SHA256 (benhoyt-inih-r53_GH0.tar.gz) = 01b0366fdfdf6363efc070c2f856f1afa33e7a6546548bada5456ad94a516241 -SIZE (benhoyt-inih-r53_GH0.tar.gz) = 16984 +TIMESTAMP = 1648347243 +SHA256 (benhoyt-inih-r55_GH0.tar.gz) = ba55f8ae2a8caf0653f30f48567241e14ea916acfc13481f502d8a9c8f507f68 +SIZE (benhoyt-inih-r55_GH0.tar.gz) = 18219 diff --git a/devel/kf5-kdoctools/Makefile b/devel/kf5-kdoctools/Makefile index 408fc0d363d..5b4d676224b 100644 --- a/devel/kf5-kdoctools/Makefile +++ b/devel/kf5-kdoctools/Makefile @@ -1,5 +1,6 @@ PORTNAME= kdoctools DISTVERSION= ${KDE_FRAMEWORKS_VERSION} +PORTREVISION= 1 CATEGORIES= devel kde kde-frameworks MAINTAINER= kde@FreeBSD.org diff --git a/devel/kf5-kio/Makefile b/devel/kf5-kio/Makefile index 7fc3cb5ebb6..1e00e70d374 100644 --- a/devel/kf5-kio/Makefile +++ b/devel/kf5-kio/Makefile @@ -1,5 +1,6 @@ PORTNAME= kio DISTVERSION= ${KDE_FRAMEWORKS_VERSION} +PORTREVISION= 1 CATEGORIES= devel kde kde-frameworks MAINTAINER= kde@FreeBSD.org diff --git a/devel/lazygit/Makefile b/devel/lazygit/Makefile index 1f121cc4a63..84d0b1a306e 100644 --- a/devel/lazygit/Makefile +++ b/devel/lazygit/Makefile @@ -1,6 +1,6 @@ PORTNAME= lazygit DISTVERSIONPREFIX= v -DISTVERSION= 0.33 +DISTVERSION= 0.34 CATEGORIES= devel MAINTAINER= meta@FreeBSD.org @@ -25,7 +25,7 @@ GH_TUPLE= \ fatih:color:v1.9.0:fatih_color/vendor/github.com/fatih/color \ fsnotify:fsnotify:v1.4.7:fsnotify_fsnotify/vendor/github.com/fsnotify/fsnotify \ gdamore:encoding:v1.0.0:gdamore_encoding/vendor/github.com/gdamore/encoding \ - gdamore:tcell:66f061b1fc9b:gdamore_tcell_v2/vendor/github.com/gdamore/tcell/v2 \ + gdamore:tcell:2a1a1b586447:gdamore_tcell_v2/vendor/github.com/gdamore/tcell/v2 \ go-errors:errors:v1.4.1:go_errors_errors/vendor/github.com/go-errors/errors \ 
go-git:gcfg:v1.5.0:go_git_gcfg/vendor/github.com/go-git/gcfg \ go-git:go-billy:v5.0.0:go_git_go_billy_v5/vendor/github.com/go-git/go-billy/v5 \ diff --git a/devel/lazygit/distinfo b/devel/lazygit/distinfo index a19948e9072..eee2df4622d 100644 --- a/devel/lazygit/distinfo +++ b/devel/lazygit/distinfo @@ -1,6 +1,6 @@ -TIMESTAMP = 1647178746 -SHA256 (jesseduffield-lazygit-v0.33_GH0.tar.gz) = ff7ab429ecd54f6e2aedcfd4835eb8ca17c1990aac2534f8c2a19ce4267ade24 -SIZE (jesseduffield-lazygit-v0.33_GH0.tar.gz) = 3729628 +TIMESTAMP = 1648449937 +SHA256 (jesseduffield-lazygit-v0.34_GH0.tar.gz) = f715ab86b219fd42462399459bfa1e04a5925268bff4839c4d96bd01264d6847 +SIZE (jesseduffield-lazygit-v0.34_GH0.tar.gz) = 3733055 SHA256 (OpenPeeDeeP-xdg-v1.0.0_GH0.tar.gz) = d565019d49bc1b47e777981e4fa70bcbe00e3c77b7263d50433230ae77b94f47 SIZE (OpenPeeDeeP-xdg-v1.0.0_GH0.tar.gz) = 6720 SHA256 (atotto-clipboard-v0.1.2_GH0.tar.gz) = 84704a60eb4de90eaebe6e8cbfab30bb53d103c26c25560d67d13469fd17934e @@ -23,8 +23,8 @@ SHA256 (fsnotify-fsnotify-v1.4.7_GH0.tar.gz) = b7530d973d0ab0e58ad8ce1b9a4b963d6 SIZE (fsnotify-fsnotify-v1.4.7_GH0.tar.gz) = 31139 SHA256 (gdamore-encoding-v1.0.0_GH0.tar.gz) = 8fb8593a69cd86f16233d63dd6d74181136ec8f22900c509e961a25eb4b2e013 SIZE (gdamore-encoding-v1.0.0_GH0.tar.gz) = 10886 -SHA256 (gdamore-tcell-66f061b1fc9b_GH0.tar.gz) = fa27985fc406b8908ebc3d2987f06d8d317e293522ea5d3bbf46919fc07accab -SIZE (gdamore-tcell-66f061b1fc9b_GH0.tar.gz) = 156184 +SHA256 (gdamore-tcell-2a1a1b586447_GH0.tar.gz) = c598e954579baf6998d5b782f84ddfe86681c0cfc6481a54208912c4cd0b2f03 +SIZE (gdamore-tcell-2a1a1b586447_GH0.tar.gz) = 160199 SHA256 (go-errors-errors-v1.4.1_GH0.tar.gz) = 2c832b791657125859015a2ffdc01e95486191a8ca12d894dcad59414ada4923 SIZE (go-errors-errors-v1.4.1_GH0.tar.gz) = 9117 SHA256 (go-git-gcfg-v1.5.0_GH0.tar.gz) = 662e46a93aba5ffe383e55597ce5749447e5c8e9409b1452f5790bfd6e1f8a11 diff --git a/devel/lfcbase/Makefile b/devel/lfcbase/Makefile index 5bd68ab5007..e541dc85de5 100644 --- 
a/devel/lfcbase/Makefile +++ b/devel/lfcbase/Makefile @@ -1,5 +1,5 @@ PORTNAME= lfcbase -PORTVERSION= 1.16.2 +PORTVERSION= 1.16.3 CATEGORIES= devel MASTER_SITES= http://www.lemke-it.com/ diff --git a/devel/lfcbase/distinfo b/devel/lfcbase/distinfo index fffaa2d2f6f..fb318bdb6c8 100644 --- a/devel/lfcbase/distinfo +++ b/devel/lfcbase/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1625343761 -SHA256 (lfcbase-1.16.2.tar.gz) = 7ac8fc8586c978f868ad4fa0e7a4f1b5109cf4e3675bcc475dc74526f2c40675 -SIZE (lfcbase-1.16.2.tar.gz) = 648935 +TIMESTAMP = 1648293648 +SHA256 (lfcbase-1.16.3.tar.gz) = 05fd2ce269daf15e875c19c6b4441cd4b3183b51a6679d7a269b463190d21857 +SIZE (lfcbase-1.16.3.tar.gz) = 649122 diff --git a/devel/libabigail/Makefile b/devel/libabigail/Makefile index cf6456ef410..3958a532371 100644 --- a/devel/libabigail/Makefile +++ b/devel/libabigail/Makefile @@ -1,5 +1,6 @@ PORTNAME= libabigail DISTVERSION= 2.0 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= SOURCEWARE/${PORTNAME}/ diff --git a/devel/libaravis/Makefile b/devel/libaravis/Makefile index eb9baec0ba9..cf3dccf6d26 100644 --- a/devel/libaravis/Makefile +++ b/devel/libaravis/Makefile @@ -2,6 +2,7 @@ PORTNAME= libaravis DISTVERSION= 0.8.20 +PORTREVISION= 1 CATEGORIES= devel PATCH_SITES= https://github.com/${GH_ACCOUNT}/${GH_PROJECT}/commit/ diff --git a/devel/libassetml/Makefile b/devel/libassetml/Makefile index 69c62117d14..f0a2787a58c 100644 --- a/devel/libassetml/Makefile +++ b/devel/libassetml/Makefile @@ -2,7 +2,7 @@ PORTNAME= libassetml PORTVERSION= 1.2.1 -PORTREVISION= 14 +PORTREVISION= 15 CATEGORIES= devel gnome MASTER_SITES= SF/ofset/${PORTNAME}/${PORTVERSION} diff --git a/devel/libdap/Makefile b/devel/libdap/Makefile index 05df30e0a5a..826aca6d8b9 100644 --- a/devel/libdap/Makefile +++ b/devel/libdap/Makefile @@ -2,6 +2,7 @@ PORTNAME= libdap PORTVERSION= 3.20.9 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= https://www.opendap.org/pub/source/ \ LOCAL/sunpoet diff --git a/devel/libdbusmenu/Makefile 
b/devel/libdbusmenu/Makefile index bc66453e1f1..fdf506ab58a 100644 --- a/devel/libdbusmenu/Makefile +++ b/devel/libdbusmenu/Makefile @@ -2,7 +2,7 @@ PORTNAME= libdbusmenu PORTVERSION= 16.04.0 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= devel MASTER_SITES= https://launchpadlibrarian.net/243440794/ \ https://launchpad.net/${PORTNAME}/${PORTVERSION:R}/${PORTVERSION}/+download/ diff --git a/devel/libgdata/Makefile b/devel/libgdata/Makefile index 520b57e637a..8999f697282 100644 --- a/devel/libgdata/Makefile +++ b/devel/libgdata/Makefile @@ -2,7 +2,7 @@ PORTNAME= libgdata PORTVERSION= 0.17.13 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel MASTER_SITES= GNOME diff --git a/devel/libght/Makefile b/devel/libght/Makefile index 48dbc123642..09058469aef 100644 --- a/devel/libght/Makefile +++ b/devel/libght/Makefile @@ -2,7 +2,7 @@ PORTNAME= libght PORTVERSION= 0.1.0 -PORTREVISION= 3 +PORTREVISION= 4 DISTVERSIONPREFIX= v CATEGORIES= devel geography diff --git a/devel/libglade2/Makefile b/devel/libglade2/Makefile index e2513a4ca1d..99eedbe68b6 100644 --- a/devel/libglade2/Makefile +++ b/devel/libglade2/Makefile @@ -2,7 +2,7 @@ PORTNAME= libglade2 PORTVERSION= 2.6.4 -PORTREVISION?= 10 +PORTREVISION?= 11 CATEGORIES= devel gnome MASTER_SITES= GNOME/sources/${PORTNAME:S/2$//}/${PORTVERSION:C/..$//} DISTNAME= ${PORTNAME:S/2$//}-${PORTVERSION} diff --git a/devel/libglademm24/Makefile b/devel/libglademm24/Makefile index fb19daf5de6..5a9f4ad8937 100644 --- a/devel/libglademm24/Makefile +++ b/devel/libglademm24/Makefile @@ -3,7 +3,7 @@ PORTNAME= libglademm PORTVERSION= 2.6.7 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= devel gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/devel/libgnt/Makefile b/devel/libgnt/Makefile index 270323b9851..592700e4fc1 100644 --- a/devel/libgnt/Makefile +++ b/devel/libgnt/Makefile @@ -1,5 +1,6 @@ PORTNAME= libgnt PORTVERSION= 2.14.3 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= SF/pidgin/${PORTNAME}/${PORTVERSION}/ diff --git 
a/devel/libgsf/Makefile b/devel/libgsf/Makefile index 891ead57185..d4eadd1a5bb 100644 --- a/devel/libgsf/Makefile +++ b/devel/libgsf/Makefile @@ -2,7 +2,7 @@ PORTNAME= libgsf PORTVERSION= 1.14.47 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/devel/libical/Makefile b/devel/libical/Makefile index d58bdaacbff..6abbf15faa8 100644 --- a/devel/libical/Makefile +++ b/devel/libical/Makefile @@ -2,7 +2,7 @@ PORTNAME= libical DISTVERSION= 3.0.8 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= devel MASTER_SITES= https://github.com/libical/libical/releases/download/v${PORTVERSION}/ diff --git a/devel/libiqxmlrpc/Makefile b/devel/libiqxmlrpc/Makefile index b08035f33f4..a2dea7a917d 100644 --- a/devel/libiqxmlrpc/Makefile +++ b/devel/libiqxmlrpc/Makefile @@ -2,7 +2,7 @@ PORTNAME= libiqxmlrpc PORTVERSION= 0.13.5 -PORTREVISION= 13 +PORTREVISION= 14 CATEGORIES= devel net MASTER_SITES= SF diff --git a/devel/liblangtag/Makefile b/devel/liblangtag/Makefile index b8e8340328c..123e455cfb1 100644 --- a/devel/liblangtag/Makefile +++ b/devel/liblangtag/Makefile @@ -1,5 +1,6 @@ PORTNAME= liblangtag PORTVERSION= 0.6.3 +PORTREVISION= 1 CATEGORIES= devel textproc MASTER_SITES= https://bitbucket.org/tagoh/liblangtag/downloads/ \ LOCAL/jkim diff --git a/devel/libosinfo/Makefile b/devel/libosinfo/Makefile index 944aeb2ba96..ade322e88d9 100644 --- a/devel/libosinfo/Makefile +++ b/devel/libosinfo/Makefile @@ -2,6 +2,7 @@ PORTNAME= libosinfo PORTVERSION= 1.10.0 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= https://releases.pagure.org/libosinfo/ diff --git a/devel/libpafe-ruby/files/patch-extconf.rb b/devel/libpafe-ruby/files/patch-extconf.rb new file mode 100644 index 00000000000..dcb3ccf9ade --- /dev/null +++ b/devel/libpafe-ruby/files/patch-extconf.rb @@ -0,0 +1,11 @@ +--- extconf.rb.orig 2011-08-12 11:59:29 UTC ++++ extconf.rb +@@ -1,7 +1,7 @@ + # $Id: extconf.rb,v 1.3 2008-01-15 12:23:29 hito Exp $ + require 'mkmf' + +-$CFLAGS = "-Wall 
-O2" ++$CFLAGS = "-Wall -O2 -fdeclspec" + + have_header("libpafe/libpafe.h") + have_library("pafe", "pasori_open") diff --git a/devel/libplist/Makefile b/devel/libplist/Makefile index 56e1ba9e522..5d143d0e048 100644 --- a/devel/libplist/Makefile +++ b/devel/libplist/Makefile @@ -1,6 +1,6 @@ PORTNAME= libplist PORTVERSION= 2.2.0 -PORTREVISION?= 0 +PORTREVISION?= 1 CATEGORIES= devel MASTER_SITES= https://github.com/libimobiledevice/${PORTNAME}/releases/download/${PORTVERSION}/ diff --git a/devel/libqb/Makefile b/devel/libqb/Makefile index 1bf90652b3b..7c04339721f 100644 --- a/devel/libqb/Makefile +++ b/devel/libqb/Makefile @@ -2,6 +2,7 @@ PORTNAME= libqb DISTVERSION= 2.0.4 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= https://github.com/ClusterLabs/${PORTNAME}/releases/download/v${DISTVERSION}/ diff --git a/devel/librcc/Makefile b/devel/librcc/Makefile index 98c4f5b83f9..7147d3f0d3a 100644 --- a/devel/librcc/Makefile +++ b/devel/librcc/Makefile @@ -2,7 +2,7 @@ PORTNAME= librcc PORTVERSION= 0.2.12 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= devel MASTER_SITES= http://dside.dyndns.org/files/rusxmms/ \ LOCAL/fluffy diff --git a/devel/librest/Makefile b/devel/librest/Makefile index b27048722b8..d54c7937a01 100644 --- a/devel/librest/Makefile +++ b/devel/librest/Makefile @@ -2,6 +2,7 @@ PORTNAME= rest PORTVERSION= 0.8.1 +PORTREVISION= 1 CATEGORIES= devel www MASTER_SITES= GNOME diff --git a/devel/libsigrokdecode/Makefile b/devel/libsigrokdecode/Makefile index dc1b8159163..9319575e4b5 100644 --- a/devel/libsigrokdecode/Makefile +++ b/devel/libsigrokdecode/Makefile @@ -11,9 +11,11 @@ COMMENT= Framework for hardware logic analyzers, protocol decoders library LICENSE= GPLv3+ LICENSE_FILE= ${WRKSRC}/COPYING -GNU_CONFIGURE= yes USES= gnome libtool pathfix pkgconfig python:3.4+ USE_GNOME= glib20 + +GNU_CONFIGURE= yes +CONFIGURE_ENV= PYTHON3=${PYTHON_CMD} MAKE_JOBS_UNSAFE= yes USE_LDCONFIG= yes INSTALL_TARGET= install-strip diff --git 
a/devel/libsigrokdecode/files/patch-configure b/devel/libsigrokdecode/files/patch-configure new file mode 100644 index 00000000000..8793549374d --- /dev/null +++ b/devel/libsigrokdecode/files/patch-configure @@ -0,0 +1,44 @@ +--- configure.orig 2019-12-11 21:37:52 UTC ++++ configure +@@ -13200,6 +13200,30 @@ sr_pkg_check_summary_append() { + # first, since usually only that variant will add "-lpython3.8". + # https://docs.python.org/3/whatsnew/3.8.html#debug-build-uses-the-same-abi-as-release-build + if test -n "$PKG_CONFIG" && \ ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"python-3.9-embed\""; } >&5 ++ ($PKG_CONFIG --exists --print-errors "python-3.9-embed") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ sr_have_python3=yes ++ SRD_PKGLIBS=${SRD_PKGLIBS}${SRD_PKGLIBS:+' '}"python-3.9-embed" ++ sr_python3_version=`$PKG_CONFIG --modversion "python-3.9-embed" 2>&5` ++ sr_pkg_check_summary_append "python-3.9-embed" "$sr_python3_version" ++else ++ sr_pkg_check_summary_append "python-3.9-embed" no ++ if test -n "$PKG_CONFIG" && \ ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"python-3.9 >= 3.9\""; } >&5 ++ ($PKG_CONFIG --exists --print-errors "python-3.9 >= 3.9") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ sr_have_python3=yes ++ SRD_PKGLIBS=${SRD_PKGLIBS}${SRD_PKGLIBS:+' '}"python-3.9 >= 3.9" ++ sr_python3_version=`$PKG_CONFIG --modversion "python-3.9 >= 3.9" 2>&5` ++ sr_pkg_check_summary_append "python-3.9 >= 3.9" "$sr_python3_version" ++else ++ sr_pkg_check_summary_append "python-3.9 >= 3.8" no ++ if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"python-3.8-embed\""; } >&5 + ($PKG_CONFIG --exists --print-errors "python-3.8-embed") 2>&5 + ac_status=$? 
+@@ -13308,6 +13332,10 @@ else + else + sr_pkg_check_summary_append "python3 >= 3.2" no + sr_have_python3=no sr_python3_version= ++fi ++ ++fi ++ + fi + + fi diff --git a/devel/libsmpp34/Makefile b/devel/libsmpp34/Makefile index c4fc691d8ee..21b1346c08c 100644 --- a/devel/libsmpp34/Makefile +++ b/devel/libsmpp34/Makefile @@ -1,5 +1,6 @@ PORTNAME= libsmpp34 PORTVERSION= 1.10 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= http://chaosophia.net/downloads/osmocom/${PORTNAME}/ diff --git a/devel/libsoup/Makefile b/devel/libsoup/Makefile index 680e1b4748a..ee227fc9c32 100644 --- a/devel/libsoup/Makefile +++ b/devel/libsoup/Makefile @@ -2,6 +2,7 @@ PORTNAME= libsoup DISTVERSION= 2.74.0 +PORTREVISION= 1 CATEGORIES= devel gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/devel/libsoup3/Makefile b/devel/libsoup3/Makefile index c79dc54435b..eaf491f08f0 100644 --- a/devel/libsoup3/Makefile +++ b/devel/libsoup3/Makefile @@ -1,6 +1,6 @@ PORTNAME= libsoup PORTVERSION= 3.0.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/devel/libvirt-glib/Makefile b/devel/libvirt-glib/Makefile index 9adece77318..d4a476e1fdc 100644 --- a/devel/libvirt-glib/Makefile +++ b/devel/libvirt-glib/Makefile @@ -2,6 +2,7 @@ PORTNAME= libvirt-glib PORTVERSION= 4.0.0 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= ftp://libvirt.org/libvirt/glib/ diff --git a/devel/libvirt/Makefile b/devel/libvirt/Makefile index a43e7e09ec4..60124d5dec6 100644 --- a/devel/libvirt/Makefile +++ b/devel/libvirt/Makefile @@ -2,6 +2,7 @@ PORTNAME= libvirt PORTVERSION= 8.1.0 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= http://libvirt.org/sources/ \ ftp://libvirt.org/libvirt/ diff --git a/devel/llvm-cheri/Makefile b/devel/llvm-cheri/Makefile index 936ed3fd565..e6d2cd30d12 100644 --- a/devel/llvm-cheri/Makefile +++ b/devel/llvm-cheri/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm PORTVERSION= ${LLVM_MAJOR}.0.d${SNAPDATE} -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= 
devel lang PKGNAMESUFFIX= ${LLVM_SUFFIX} diff --git a/devel/llvm-devel/Makefile b/devel/llvm-devel/Makefile index 4ae2edda93b..19b026594a5 100644 --- a/devel/llvm-devel/Makefile +++ b/devel/llvm-devel/Makefile @@ -2,7 +2,7 @@ PORTNAME= llvm PORTVERSION= ${LLVM_MAJOR}.0.d${SNAPDATE} -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= devel lang PKGNAMESUFFIX= ${LLVM_SUFFIX} diff --git a/devel/llvm10/Makefile b/devel/llvm10/Makefile index ae33d17cce6..58ff01de01e 100644 --- a/devel/llvm10/Makefile +++ b/devel/llvm10/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm DISTVERSION= 10.0.1 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= devel lang MASTER_SITES= https://github.com/llvm/llvm-project/releases/download/llvmorg-${DISTVERSION:S/rc/-rc/}/ \ https://${PRE_}releases.llvm.org/${LLVM_RELEASE}/${RCDIR} diff --git a/devel/llvm11/Makefile b/devel/llvm11/Makefile index 992203d6ee6..903137a908e 100644 --- a/devel/llvm11/Makefile +++ b/devel/llvm11/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm DISTVERSION= 11.0.1 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= devel lang MASTER_SITES= https://github.com/llvm/llvm-project/releases/download/llvmorg-${DISTVERSION:S/rc/-rc/}/ \ https://${PRE_}releases.llvm.org/${LLVM_RELEASE}/${RCDIR} diff --git a/devel/llvm12/Makefile b/devel/llvm12/Makefile index 0427b8ca484..98d979905ca 100644 --- a/devel/llvm12/Makefile +++ b/devel/llvm12/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm DISTVERSION= 12.0.1 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= devel lang MASTER_SITES= https://github.com/llvm/llvm-project/releases/download/llvmorg-${DISTVERSION:S/rc/-rc/}/ \ https://${PRE_}releases.llvm.org/${LLVM_RELEASE}/${RCDIR} diff --git a/devel/llvm13/Makefile b/devel/llvm13/Makefile index 642126d32fd..ef6e4395563 100644 --- a/devel/llvm13/Makefile +++ b/devel/llvm13/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm DISTVERSION= 13.0.1 -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= devel lang MASTER_SITES= 
https://github.com/llvm/llvm-project/releases/download/llvmorg-${DISTVERSION:S/rc/-rc/}/ \ https://${PRE_}releases.llvm.org/${LLVM_RELEASE}/${RCDIR} diff --git a/devel/llvm14/Makefile b/devel/llvm14/Makefile index 73d459cc20f..8e3bb67aa2b 100644 --- a/devel/llvm14/Makefile +++ b/devel/llvm14/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm -DISTVERSION= 14.0.0rc4 -PORTREVISION= 0 +DISTVERSION= 14.0.0 +PORTREVISION= 1 CATEGORIES= devel lang MASTER_SITES= https://github.com/llvm/llvm-project/releases/download/llvmorg-${DISTVERSION:S/rc/-rc/}/ \ https://${PRE_}releases.llvm.org/${LLVM_RELEASE}/${RCDIR} @@ -134,9 +134,6 @@ CLANG_SUB_LIST= XCC=clang${LLVM_SUFFIX} \ X_COMPILER_TYPE=clang CLANG_USE= GNOME=libxml2 COMPILER_RT_DESC= Sanitizer libraries -# An unwind.h is require to build. I think an in-tree one should be used -# but this seems to work as a workaround... -COMPILER_RT_BUILD_DEPENDS= libunwind>0:devel/libunwind COMPILER_RT_CMAKE_ON= -DCOMPILER_RT_INSTALL_PATH=${LLVM_PREFIX}/lib/clang/${LLVM_RELEASE} # Allow unwind.h to be found reliably COMPILER_RT_USES= localbase @@ -273,6 +270,12 @@ MLIR_PATTERN= ${MLIR_COMMANDS:S/^/bin./:tW:C/ */|/g}|mlir|libMLIR|obj.MLIRCAP .include +.if ${OSVERSION} < 1300525 +# An unwind.h is require to build. I think an in-tree one should be used +# but this seems to work as a workaround... 
+COMPILER_RT_BUILD_DEPENDS= libunwind>0:devel/libunwind +.endif + .if defined(PPC_ABI) && ${PPC_ABI} == ELFv2 EXTRA_PATCHES= ${FILESDIR}/extra-patch-clang_lib_Driver_ToolChains_Clang.cpp .endif diff --git a/devel/llvm14/distinfo b/devel/llvm14/distinfo index 8351353de4d..88c0f919208 100644 --- a/devel/llvm14/distinfo +++ b/devel/llvm14/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647237419 -SHA256 (llvm-project-14.0.0rc4.src.tar.xz) = ca833d5ca97b2d46053f6d2759fcd9ec3201201275e0c2119a4680356f5b0a15 -SIZE (llvm-project-14.0.0rc4.src.tar.xz) = 105583596 +TIMESTAMP = 1648147417 +SHA256 (llvm-project-14.0.0.src.tar.xz) = 35ce9edbc8f774fe07c8f4acdf89ec8ac695c8016c165dd86b8d10e7cba07e23 +SIZE (llvm-project-14.0.0.src.tar.xz) = 105585028 diff --git a/devel/llvm70/Makefile b/devel/llvm70/Makefile index f669bc190c5..9bdb4a63f71 100644 --- a/devel/llvm70/Makefile +++ b/devel/llvm70/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm DISTVERSION= 7.0.1 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= devel lang MASTER_SITES= http://${PRE_}releases.llvm.org/${LLVM_RELEASE}/${RCDIR} PKGNAMESUFFIX= ${LLVM_SUFFIX} diff --git a/devel/llvm80/Makefile b/devel/llvm80/Makefile index 82ac6ee4bc6..eabca2a91af 100644 --- a/devel/llvm80/Makefile +++ b/devel/llvm80/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm DISTVERSION= 8.0.1 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= devel lang MASTER_SITES= https://github.com/llvm/llvm-project/releases/download/llvmorg-${DISTVERSION}/ PKGNAMESUFFIX= ${LLVM_SUFFIX} diff --git a/devel/llvm90/Makefile b/devel/llvm90/Makefile index 04f794ea57d..1673485c2c3 100644 --- a/devel/llvm90/Makefile +++ b/devel/llvm90/Makefile @@ -1,6 +1,6 @@ PORTNAME= llvm DISTVERSION= 9.0.1 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= devel lang MASTER_SITES= https://github.com/llvm/llvm-project/releases/download/llvmorg-${DISTVERSION}/ PKGNAMESUFFIX= ${LLVM_SUFFIX} diff --git a/devel/m17n-lib/Makefile b/devel/m17n-lib/Makefile index 266a253c984..0903f57d289 100644 --- a/devel/m17n-lib/Makefile +++ 
b/devel/m17n-lib/Makefile @@ -2,7 +2,7 @@ PORTNAME= m17n-lib PORTVERSION= 1.8.0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= devel textproc MASTER_SITES= SAVANNAH/m17n diff --git a/devel/msitools/Makefile b/devel/msitools/Makefile index e69511d0c6d..3ac8d93d682 100644 --- a/devel/msitools/Makefile +++ b/devel/msitools/Makefile @@ -1,5 +1,6 @@ PORTNAME= msitools DISTVERSION= 0.101 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= GNOME diff --git a/devel/ocaml-sdl/files/patch-src_Makefile b/devel/ocaml-sdl/files/patch-src_Makefile new file mode 100644 index 00000000000..f6c2d206353 --- /dev/null +++ b/devel/ocaml-sdl/files/patch-src_Makefile @@ -0,0 +1,20 @@ +--- src/Makefile.orig 2010-04-19 20:11:04 UTC ++++ src/Makefile +@@ -17,7 +17,7 @@ gfx_MODULES = sdlgfx + + TARGETS = sdl.cma $(patsubst %,sdl%.cma,$(PARTS)) + TARGETS += libsdlstub.$(A) $(patsubst %,libsdl%stub.$(A),$(PARTS)) +-ifdef OCAMLOPT ++ifneq ($(OCAMLOPT),no) + TARGETS += sdl.cmxa $(patsubst %,sdl%.cmxa,$(PARTS)) + endif + +@@ -87,7 +87,7 @@ ifdef OCAMLFIND + install-findlib : + $(OCAMLFIND) install sdl ../META \ + *.cma lib*.$(A) *.cmi *.mli \ +- $(if $(OCAMLOPT),*.cmxa sdl*.$(A) *.cmx) \ ++ $(if $(filter-out no,$(OCAMLOPT)),*.cmxa sdl*.$(A) *.cmx) \ + $(if $(OCAMLMKLIB),dll*.so) + ifeq ($(PLATFORM),Apple) + $(RANLIB) $$($(OCAMLFIND) printconf destdir)/sdl/*.$(A) diff --git a/devel/ocaml-sdl/files/patch-src_sdlmouse.ml b/devel/ocaml-sdl/files/patch-src_sdlmouse.ml new file mode 100644 index 00000000000..1690b0937f9 --- /dev/null +++ b/devel/ocaml-sdl/files/patch-src_sdlmouse.ml @@ -0,0 +1,17 @@ +--- src/sdlmouse.ml.orig 2011-04-10 15:33:52 UTC ++++ src/sdlmouse.ml +@@ -49,12 +49,12 @@ external cursor_data : cursor -> cursor_data + = "ml_SDL_Cursor_data" + + let string_of_bits x = +- let s = String.make 8 ' ' in ++ let s = Bytes.make 8 ' ' in + for i=0 to 7 do + if x land (1 lsl i) <> 0 + then s.[7-i] <- '@' + done ; +- s ++ Bytes.to_string s + + let pprint_cursor c = + let { data = data ; mask = mask 
} = cursor_data c in diff --git a/devel/p5-Alien-Build/Makefile b/devel/p5-Alien-Build/Makefile index 089057c5ce9..b78bfe2ab08 100644 --- a/devel/p5-Alien-Build/Makefile +++ b/devel/p5-Alien-Build/Makefile @@ -1,5 +1,5 @@ PORTNAME= Alien-Build -PORTVERSION= 2.45 +PORTVERSION= 2.48 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/devel/p5-Alien-Build/distinfo b/devel/p5-Alien-Build/distinfo index 809e7cdc7ad..0412ec678e6 100644 --- a/devel/p5-Alien-Build/distinfo +++ b/devel/p5-Alien-Build/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1636117636 -SHA256 (Alien-Build-2.45.tar.gz) = 80984dab9669a70170020c155d793783d1c7664e98aaf299b1a737916db0b466 -SIZE (Alien-Build-2.45.tar.gz) = 319449 +TIMESTAMP = 1648185887 +SHA256 (Alien-Build-2.48.tar.gz) = 3b26e4794f83c681f3eff8f9d052ba74262c1ca5bad30477a41e66ba64391696 +SIZE (Alien-Build-2.48.tar.gz) = 320641 diff --git a/devel/p5-Alien-Build/pkg-plist b/devel/p5-Alien-Build/pkg-plist index d253913562e..171930742e7 100644 --- a/devel/p5-Alien-Build/pkg-plist +++ b/devel/p5-Alien-Build/pkg-plist @@ -75,6 +75,7 @@ %%SITE_PERL%%/Alien/Build/Version/Basic.pm %%SITE_PERL%%/Alien/Build/rc.pm %%SITE_PERL%%/Alien/Role.pm +%%SITE_PERL%%/Alien/Util.pm %%SITE_PERL%%/Test/Alien.pm %%SITE_PERL%%/Test/Alien/Build.pm %%SITE_PERL%%/Test/Alien/CanCompile.pm @@ -160,6 +161,7 @@ %%PERL5_MAN3%%/Alien::Build::Version::Basic.3.gz %%PERL5_MAN3%%/Alien::Build::rc.3.gz %%PERL5_MAN3%%/Alien::Role.3.gz +%%PERL5_MAN3%%/Alien::Util.3.gz %%PERL5_MAN3%%/Test::Alien.3.gz %%PERL5_MAN3%%/Test::Alien::Build.3.gz %%PERL5_MAN3%%/Test::Alien::CanCompile.3.gz diff --git a/devel/p5-App-cpanminus/Makefile b/devel/p5-App-cpanminus/Makefile index 1744328f780..e1be08d519b 100644 --- a/devel/p5-App-cpanminus/Makefile +++ b/devel/p5-App-cpanminus/Makefile @@ -1,5 +1,5 @@ PORTNAME= App-cpanminus -PORTVERSION= 1.7044 +PORTVERSION= 1.7045 CATEGORIES= devel perl5 MASTER_SITES= CPAN MASTER_SITE_SUBDIR= CPAN:MIYAGAWA diff --git a/devel/p5-App-cpanminus/distinfo 
b/devel/p5-App-cpanminus/distinfo index a4b981c85ef..1ffc8a34890 100644 --- a/devel/p5-App-cpanminus/distinfo +++ b/devel/p5-App-cpanminus/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1633476778 -SHA256 (App-cpanminus-1.7044.tar.gz) = 9b60767fe40752ef7a9d3f13f19060a63389a5c23acc3e9827e19b75500f81f3 -SIZE (App-cpanminus-1.7044.tar.gz) = 316368 +TIMESTAMP = 1648181056 +SHA256 (App-cpanminus-1.7045.tar.gz) = ac4e4adc23fec0ab54f088aca511f5a57d95e6c97a12a1cb98eed1fe0fe0e99c +SIZE (App-cpanminus-1.7045.tar.gz) = 316204 diff --git a/devel/p5-CPAN-Perl-Releases/Makefile b/devel/p5-CPAN-Perl-Releases/Makefile index 9071be82c2c..4f07c3714f3 100644 --- a/devel/p5-CPAN-Perl-Releases/Makefile +++ b/devel/p5-CPAN-Perl-Releases/Makefile @@ -1,7 +1,7 @@ # Created by: Steve Wills PORTNAME= CPAN-Perl-Releases -PORTVERSION= 5.20220220 +PORTVERSION= 5.20220306 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/devel/p5-CPAN-Perl-Releases/distinfo b/devel/p5-CPAN-Perl-Releases/distinfo index 13fe7d06c18..540423bb6be 100644 --- a/devel/p5-CPAN-Perl-Releases/distinfo +++ b/devel/p5-CPAN-Perl-Releases/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057874 -SHA256 (CPAN-Perl-Releases-5.20220220.tar.gz) = f5d83a5135367d42481983e51f23ba7d9b4e386a535fba7f101658b067661de0 -SIZE (CPAN-Perl-Releases-5.20220220.tar.gz) = 22847 +TIMESTAMP = 1647264454 +SHA256 (CPAN-Perl-Releases-5.20220306.tar.gz) = 5f1570a1215c33adc2b5fd86d4ced72a01d129aaf7d691f9aa7dc610ec5aa307 +SIZE (CPAN-Perl-Releases-5.20220306.tar.gz) = 23007 diff --git a/devel/p5-Class-MethodMaker/Makefile b/devel/p5-Class-MethodMaker/Makefile index b3eacd0d68e..a3c40f7c92b 100644 --- a/devel/p5-Class-MethodMaker/Makefile +++ b/devel/p5-Class-MethodMaker/Makefile @@ -2,6 +2,7 @@ PORTNAME= Class-MethodMaker PORTVERSION= 2.24 +PORTREVISION= 1 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- @@ -15,4 +16,7 @@ LICENSE_COMB= dual USES= perl5 USE_PERL5= configure +post-install: + ${STRIP_CMD} 
${STAGEDIR}${PREFIX}/${SITE_ARCH_REL}/auto/Class/MethodMaker/MethodMaker.so + .include diff --git a/devel/p5-Devel-MAT-Dumper/Makefile b/devel/p5-Devel-MAT-Dumper/Makefile index 1589a387b02..f00c948ae24 100644 --- a/devel/p5-Devel-MAT-Dumper/Makefile +++ b/devel/p5-Devel-MAT-Dumper/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= Devel-MAT-Dumper -PORTVERSION= 0.42 +PORTVERSION= 0.43 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/devel/p5-Devel-MAT-Dumper/distinfo b/devel/p5-Devel-MAT-Dumper/distinfo index cf040e5ee4d..8df7c4e8446 100644 --- a/devel/p5-Devel-MAT-Dumper/distinfo +++ b/devel/p5-Devel-MAT-Dumper/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1587756746 -SHA256 (Devel-MAT-Dumper-0.42.tar.gz) = bc74191dc39d2331948ce98fe750c5b0064aa71197c1e265c6674e750e64a3bd -SIZE (Devel-MAT-Dumper-0.42.tar.gz) = 31225 +TIMESTAMP = 1647264458 +SHA256 (Devel-MAT-Dumper-0.43.tar.gz) = e09cfb3dd5859097b909aad9b5677149f499d1fb92436430bec76e00b1de1189 +SIZE (Devel-MAT-Dumper-0.43.tar.gz) = 31443 diff --git a/devel/p5-Devel-MAT/Makefile b/devel/p5-Devel-MAT/Makefile index 66b72033883..54313509a10 100644 --- a/devel/p5-Devel-MAT/Makefile +++ b/devel/p5-Devel-MAT/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= Devel-MAT -PORTVERSION= 0.44 +PORTVERSION= 0.45 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/devel/p5-Devel-MAT/distinfo b/devel/p5-Devel-MAT/distinfo index eb216ea03b2..1acdb44c128 100644 --- a/devel/p5-Devel-MAT/distinfo +++ b/devel/p5-Devel-MAT/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1618320014 -SHA256 (Devel-MAT-0.44.tar.gz) = 602d9282abc031e8990fad9d910dda2241d3017f62323ebd8a12a4b003ee7b20 -SIZE (Devel-MAT-0.44.tar.gz) = 131408 +TIMESTAMP = 1647264456 +SHA256 (Devel-MAT-0.45.tar.gz) = a89a4497236424ab31172d1684dd15acb3fd2b6dcdf796a778fa0f18f678f108 +SIZE (Devel-MAT-0.45.tar.gz) = 132301 diff --git a/devel/p5-Devel-MAT/pkg-plist b/devel/p5-Devel-MAT/pkg-plist index 
ef141c25483..38b15056b8a 100644 --- a/devel/p5-Devel-MAT/pkg-plist +++ b/devel/p5-Devel-MAT/pkg-plist @@ -51,6 +51,7 @@ bin/pmat-list-orphans %%SITE_ARCH%%/auto/share/module/Devel-MAT-UI/icons/strength-strong.svg %%SITE_ARCH%%/auto/share/module/Devel-MAT-UI/icons/strength-weak.svg %%SITE_ARCH%%/auto/share/module/Devel-MAT-UI/icons/type-ARRAY.svg +%%SITE_ARCH%%/auto/share/module/Devel-MAT-UI/icons/type-BOOL.svg %%SITE_ARCH%%/auto/share/module/Devel-MAT-UI/icons/type-CODE.svg %%SITE_ARCH%%/auto/share/module/Devel-MAT-UI/icons/type-FORMAT.svg %%SITE_ARCH%%/auto/share/module/Devel-MAT-UI/icons/type-GLOB.svg diff --git a/devel/p5-Devel-PPPort/Makefile b/devel/p5-Devel-PPPort/Makefile index 700e4eb3fa9..195090c9670 100644 --- a/devel/p5-Devel-PPPort/Makefile +++ b/devel/p5-Devel-PPPort/Makefile @@ -1,5 +1,5 @@ PORTNAME= Devel-PPPort -PORTVERSION= 3.64 +PORTVERSION= 3.67 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/devel/p5-Devel-PPPort/distinfo b/devel/p5-Devel-PPPort/distinfo index 4ac1b8120d4..549d8662c79 100644 --- a/devel/p5-Devel-PPPort/distinfo +++ b/devel/p5-Devel-PPPort/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057876 -SHA256 (Devel-PPPort-3.64.tar.gz) = 060630172f59b0583cf8627d10280746c863942cd22a2583c92ae59cffefcb49 -SIZE (Devel-PPPort-3.64.tar.gz) = 477927 +TIMESTAMP = 1647264460 +SHA256 (Devel-PPPort-3.67.tar.gz) = 77954772eab2a0de4a49a77b334f73ef0b8f5251bdc0ddd70d2a4d8110f66227 +SIZE (Devel-PPPort-3.67.tar.gz) = 480891 diff --git a/devel/p5-Exporter-Lite/Makefile b/devel/p5-Exporter-Lite/Makefile index f7b9bc4f54f..4bdf91476c0 100644 --- a/devel/p5-Exporter-Lite/Makefile +++ b/devel/p5-Exporter-Lite/Makefile @@ -12,8 +12,9 @@ COMMENT= Lightweight exporting of functions and variables LICENSE= ART10 GPLv1+ LICENSE_COMB= dual -NO_ARCH= yes USES= perl5 USE_PERL5= configure +NO_ARCH= yes + .include diff --git a/devel/p5-File-MimeInfo/Makefile b/devel/p5-File-MimeInfo/Makefile index f3272a42c5c..7ba0f997922 100644 --- 
a/devel/p5-File-MimeInfo/Makefile +++ b/devel/p5-File-MimeInfo/Makefile @@ -1,7 +1,7 @@ # Created by: Rong-En Fan PORTNAME= File-MimeInfo -PORTVERSION= 0.31 +PORTVERSION= 0.32 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/devel/p5-File-MimeInfo/distinfo b/devel/p5-File-MimeInfo/distinfo index 071bc40e850..1e7bfe59fa1 100644 --- a/devel/p5-File-MimeInfo/distinfo +++ b/devel/p5-File-MimeInfo/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642102179 -SHA256 (File-MimeInfo-0.31.tar.gz) = 027303fc193e31674f254d4c1f3a84e8092fca2e5e69d697dd95c06ef487a554 -SIZE (File-MimeInfo-0.31.tar.gz) = 35636 +TIMESTAMP = 1647264462 +SHA256 (File-MimeInfo-0.32.tar.gz) = 5190028d994c003abc0e5556e6095eb17858a8baaa473a4b85b3458e316a8742 +SIZE (File-MimeInfo-0.32.tar.gz) = 37576 diff --git a/devel/p5-Inline-C/Makefile b/devel/p5-Inline-C/Makefile index 32a7b00771c..094bbd028c9 100644 --- a/devel/p5-Inline-C/Makefile +++ b/devel/p5-Inline-C/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= Inline-C -PORTVERSION= 0.81 +PORTVERSION= 0.82 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- @@ -15,7 +15,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE BUILD_DEPENDS= p5-File-ShareDir-Install>=0.06:devel/p5-File-ShareDir-Install \ ${RUN_DEPENDS} -RUN_DEPENDS= p5-Inline>=0.83:devel/p5-Inline \ +RUN_DEPENDS= p5-Inline>=0.86:devel/p5-Inline \ p5-Parse-RecDescent>=1.967009:devel/p5-Parse-RecDescent \ p5-Pegex>=0.66:devel/p5-Pegex TEST_DEPENDS= p5-File-Copy-Recursive>=0:devel/p5-File-Copy-Recursive \ diff --git a/devel/p5-Inline-C/distinfo b/devel/p5-Inline-C/distinfo index 821f448635c..294cd301401 100644 --- a/devel/p5-Inline-C/distinfo +++ b/devel/p5-Inline-C/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1557679332 -SHA256 (Inline-C-0.81.tar.gz) = f185258d9050d7f79b4f00f12625cc469c2f700ff62d3e831cb18d80d2c87aac -SIZE (Inline-C-0.81.tar.gz) = 76855 +TIMESTAMP = 1647264464 +SHA256 (Inline-C-0.82.tar.gz) = 10fbcf1e158d1c8d77e1dd934e379165b126a45c13645ad0be9dc07d151dd0cc 
+SIZE (Inline-C-0.82.tar.gz) = 79182 diff --git a/devel/p5-Log-TraceMessages/Makefile b/devel/p5-Log-TraceMessages/Makefile index 286da7bd315..62394d9f881 100644 --- a/devel/p5-Log-TraceMessages/Makefile +++ b/devel/p5-Log-TraceMessages/Makefile @@ -16,4 +16,6 @@ RUN_DEPENDS= p5-HTML-FromText>=0:www/p5-HTML-FromText USES= perl5 USE_PERL5= configure +NO_ARCH= yes + .include diff --git a/devel/p5-Module-Load-Util/Makefile b/devel/p5-Module-Load-Util/Makefile index 40d498b6360..d11e9fcb777 100644 --- a/devel/p5-Module-Load-Util/Makefile +++ b/devel/p5-Module-Load-Util/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= Module-Load-Util -PORTVERSION= 0.006 +PORTVERSION= 0.008 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/devel/p5-Module-Load-Util/distinfo b/devel/p5-Module-Load-Util/distinfo index a9d6bf72c18..c1c1f15aedb 100644 --- a/devel/p5-Module-Load-Util/distinfo +++ b/devel/p5-Module-Load-Util/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634111680 -SHA256 (Module-Load-Util-0.006.tar.gz) = 366eec5fa0c62a263870812bbe018ce04125ad25e68fe97d45c55063704119fc -SIZE (Module-Load-Util-0.006.tar.gz) = 19244 +TIMESTAMP = 1647264466 +SHA256 (Module-Load-Util-0.008.tar.gz) = 585ea12cc6ddb125e6aa05c2d91d84e51ebb67aeda21814710209fba04476480 +SIZE (Module-Load-Util-0.008.tar.gz) = 19457 diff --git a/devel/p5-No-Worries/Makefile b/devel/p5-No-Worries/Makefile index 4ea7251d99c..ae2b5b7637e 100644 --- a/devel/p5-No-Worries/Makefile +++ b/devel/p5-No-Worries/Makefile @@ -20,6 +20,6 @@ RUN_DEPENDS= p5-HTTP-Date>=0:www/p5-HTTP-Date \ USES= perl5 USE_PERL5= configure -NO_ARCH= y +NO_ARCH= yes .include diff --git a/devel/p5-Object-Pad/Makefile b/devel/p5-Object-Pad/Makefile index 0494ddbd14c..deca6d9dd27 100644 --- a/devel/p5-Object-Pad/Makefile +++ b/devel/p5-Object-Pad/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= Object-Pad -PORTVERSION= 0.61 +PORTVERSION= 0.63 CATEGORIES= devel perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- @@ 
-13,7 +13,7 @@ LICENSE= ART10 GPLv1+ LICENSE_COMB= dual LICENSE_FILE= ${WRKSRC}/LICENSE -BUILD_DEPENDS= p5-XS-Parse-Keyword>=0.19:devel/p5-XS-Parse-Keyword \ +BUILD_DEPENDS= p5-XS-Parse-Keyword>=0.22:devel/p5-XS-Parse-Keyword \ p5-XS-Parse-Sublike>=0.15:devel/p5-XS-Parse-Sublike TEST_DEPENDS= p5-Test-Fatal>=0:devel/p5-Test-Fatal \ p5-Test-Refcount>=0:devel/p5-Test-Refcount diff --git a/devel/p5-Object-Pad/distinfo b/devel/p5-Object-Pad/distinfo index 6ecc5ac618d..f2c797b6fe4 100644 --- a/devel/p5-Object-Pad/distinfo +++ b/devel/p5-Object-Pad/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057886 -SHA256 (Object-Pad-0.61.tar.gz) = 1e2522f497fc9689c548db3851364898c8113286bb809bb2d7b22a93326c1949 -SIZE (Object-Pad-0.61.tar.gz) = 132967 +TIMESTAMP = 1647264468 +SHA256 (Object-Pad-0.63.tar.gz) = 55494ecada26846103c115d5364b58201b36fb6bfc8d7d97629da593ccecf00d +SIZE (Object-Pad-0.63.tar.gz) = 138966 diff --git a/devel/p5-Ouch/Makefile b/devel/p5-Ouch/Makefile index 38430c47848..07958177761 100644 --- a/devel/p5-Ouch/Makefile +++ b/devel/p5-Ouch/Makefile @@ -10,11 +10,13 @@ COMMENT= Perl extension for exceptions that don't hurt LICENSE= ART10 GPLv1+ LICENSE_COMB= dual +LICENSE_FILE= ${WRKSRC}/LICENSE TEST_DEPENDS= p5-Test-Trap>=0:devel/p5-Test-Trap -USES= perl5 +USES= perl5 USE_PERL5= configure -NO_ARCH= YES + +NO_ARCH= yes .include diff --git a/devel/p5-Test-Time/Makefile b/devel/p5-Test-Time/Makefile index 039f85cc77a..269e486aff3 100644 --- a/devel/p5-Test-Time/Makefile +++ b/devel/p5-Test-Time/Makefile @@ -7,7 +7,13 @@ PKGNAMEPREFIX= p5- MAINTAINER= perl@FreeBSD.org COMMENT= Perl extension to override time()/sleep() core functions for testing +LICENSE= ART10 GPLv1+ +LICENSE_COMB= dual +LICENSE_FILE= ${WRKSRC}/LICENSE + USES= perl5 USE_PERL5= configure +NO_ARCH= yes + .include diff --git a/devel/p5-Test2-Harness-UI/Makefile b/devel/p5-Test2-Harness-UI/Makefile index ca38d1d2ba3..11dc598ad78 100644 --- a/devel/p5-Test2-Harness-UI/Makefile +++ 
b/devel/p5-Test2-Harness-UI/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= Test2-Harness-UI -PORTVERSION= 0.000108 +PORTVERSION= 0.000115 CATEGORIES= devel perl5 MASTER_SITES= CPAN MASTER_SITE_SUBDIR= CPAN:EXODIST diff --git a/devel/p5-Test2-Harness-UI/distinfo b/devel/p5-Test2-Harness-UI/distinfo index f2e4f9aaf82..897cbf7933a 100644 --- a/devel/p5-Test2-Harness-UI/distinfo +++ b/devel/p5-Test2-Harness-UI/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057900 -SHA256 (Test2-Harness-UI-0.000108.tar.gz) = bf42a64dcb795af0b90afb6750ece20c08d9567d7af149c4ed0b35984c5fc6da -SIZE (Test2-Harness-UI-0.000108.tar.gz) = 309336 +TIMESTAMP = 1647264474 +SHA256 (Test2-Harness-UI-0.000115.tar.gz) = 8175eb0ff6a4478e922334738ce2d12b1e2cf4ecacefdc01ec42aa458bf3c710 +SIZE (Test2-Harness-UI-0.000115.tar.gz) = 316872 diff --git a/devel/p5-Test2-Harness-UI/pkg-plist b/devel/p5-Test2-Harness-UI/pkg-plist index 9f0303c30a6..1c777521a13 100644 --- a/devel/p5-Test2-Harness-UI/pkg-plist +++ b/devel/p5-Test2-Harness-UI/pkg-plist @@ -1,3 +1,4 @@ +bin/yath-ui-backfill.pl bin/yath-ui-importer.pl bin/yath-ui-sweeper.pl %%SITE_PERL%%/App/Yath/Command/ui.pm @@ -6,6 +7,7 @@ bin/yath-ui-sweeper.pl %%SITE_PERL%%/Test2/Harness/Renderer/UI.pm %%SITE_PERL%%/Test2/Harness/Renderer/UIDB.pm %%SITE_PERL%%/Test2/Harness/UI.pm +%%SITE_PERL%%/Test2/Harness/UI/BackFill.pm %%SITE_PERL%%/Test2/Harness/UI/CPANImporter.pm %%SITE_PERL%%/Test2/Harness/UI/Config.pm %%SITE_PERL%%/Test2/Harness/UI/Controller.pm @@ -46,6 +48,7 @@ bin/yath-ui-sweeper.pl %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL/Permission.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL/PrimaryEmail.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL/Project.pm +%%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL/Reporting.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL/Run.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL/RunField.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL/Session.pm @@ -67,6 +70,7 @@ bin/yath-ui-sweeper.pl 
%%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL56/Permission.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL56/PrimaryEmail.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL56/Project.pm +%%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL56/Reporting.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL56/Run.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL56/RunField.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/MySQL56/Session.pm @@ -87,6 +91,7 @@ bin/yath-ui-sweeper.pl %%SITE_PERL%%/Test2/Harness/UI/Schema/Overlay/Permission.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Overlay/PrimaryEmail.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Overlay/Project.pm +%%SITE_PERL%%/Test2/Harness/UI/Schema/Overlay/Reporting.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Overlay/Run.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Overlay/RunField.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Overlay/Session.pm @@ -108,6 +113,7 @@ bin/yath-ui-sweeper.pl %%SITE_PERL%%/Test2/Harness/UI/Schema/PostgreSQL/Permission.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/PostgreSQL/PrimaryEmail.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/PostgreSQL/Project.pm +%%SITE_PERL%%/Test2/Harness/UI/Schema/PostgreSQL/Reporting.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/PostgreSQL/Run.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/PostgreSQL/RunField.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/PostgreSQL/Session.pm @@ -128,6 +134,7 @@ bin/yath-ui-sweeper.pl %%SITE_PERL%%/Test2/Harness/UI/Schema/Result/Permission.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Result/PrimaryEmail.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Result/Project.pm +%%SITE_PERL%%/Test2/Harness/UI/Schema/Result/Reporting.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Result/Run.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Result/RunField.pm %%SITE_PERL%%/Test2/Harness/UI/Schema/Result/Session.pm diff --git a/devel/p5-Test2-Harness/Makefile b/devel/p5-Test2-Harness/Makefile index ab62d5e7d15..f8cb2c79f3f 100644 --- a/devel/p5-Test2-Harness/Makefile +++ b/devel/p5-Test2-Harness/Makefile @@ -1,7 +1,7 @@ # Created by: 
Po-Chuan Hsieh PORTNAME= Test2-Harness -PORTVERSION= 1.000110 +PORTVERSION= 1.000111 CATEGORIES= devel perl5 MASTER_SITES= CPAN MASTER_SITE_SUBDIR= CPAN:EXODIST diff --git a/devel/p5-Test2-Harness/distinfo b/devel/p5-Test2-Harness/distinfo index d20cfb320fa..bb920f5cf4e 100644 --- a/devel/p5-Test2-Harness/distinfo +++ b/devel/p5-Test2-Harness/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057898 -SHA256 (Test2-Harness-1.000110.tar.gz) = 7cab7efbed4ca5444149230b74cb9553a4dd8e0ab6665da53b921f57f6065c4e -SIZE (Test2-Harness-1.000110.tar.gz) = 363813 +TIMESTAMP = 1647264472 +SHA256 (Test2-Harness-1.000111.tar.gz) = 02d095c0df5d632d0ba5728b478136711f1d6c4195d5eafb7a7285adbb2e8fac +SIZE (Test2-Harness-1.000111.tar.gz) = 364496 diff --git a/devel/pecl-grpc/Makefile b/devel/pecl-grpc/Makefile index 28d1c91264e..450a857b96b 100644 --- a/devel/pecl-grpc/Makefile +++ b/devel/pecl-grpc/Makefile @@ -1,7 +1,7 @@ # Created by: vanilla@ PORTNAME= grpc -PORTVERSION= 1.44.0 +PORTVERSION= 1.45.0 CATEGORIES= devel net MAINTAINER= vanilla@FreeBSD.org diff --git a/devel/pecl-grpc/distinfo b/devel/pecl-grpc/distinfo index 2cb2894cc53..c63b38a92db 100644 --- a/devel/pecl-grpc/distinfo +++ b/devel/pecl-grpc/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645709977 -SHA256 (PECL/grpc-1.44.0.tgz) = f6d6be7e1bd49b3aae7ada97233fe68172100a71a23e5039acb2c0c1b87e4f11 -SIZE (PECL/grpc-1.44.0.tgz) = 4804039 +TIMESTAMP = 1648365121 +SHA256 (PECL/grpc-1.45.0.tgz) = 48f9c408167cd2c5df5d889526319f3ac4b16410599dab0ef693eef50e649488 +SIZE (PECL/grpc-1.45.0.tgz) = 4912821 diff --git a/devel/phpunit9/Makefile b/devel/phpunit9/Makefile index 43218e14818..9b5945988b4 100644 --- a/devel/phpunit9/Makefile +++ b/devel/phpunit9/Makefile @@ -1,5 +1,5 @@ PORTNAME= phpunit -PORTVERSION= 9.5.16 +PORTVERSION= 9.5.18 CATEGORIES= devel www MASTER_SITES= https://phar.phpunit.de/ PKGNAMESUFFIX= 9${PHP_PKGNAMESUFFIX} diff --git a/devel/phpunit9/distinfo b/devel/phpunit9/distinfo index 03e8ee0fa4a..a09b48d2e00 100644 --- 
a/devel/phpunit9/distinfo +++ b/devel/phpunit9/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057876 -SHA256 (phpunit-9.5.16.phar) = 342166d3067cb86a4d9620d0a49b9ae4b4bbe25d5d751a0ecfd3ed4e96409b0a -SIZE (phpunit-9.5.16.phar) = 4513114 +TIMESTAMP = 1647264426 +SHA256 (phpunit-9.5.18.phar) = 44ac6b8284418f2cfb8031a6314816f5cb24544366225a227f3ccb1744f33ce2 +SIZE (phpunit-9.5.18.phar) = 4517083 diff --git a/devel/py-DateTime/files/patch-2to3 b/devel/py-DateTime/files/patch-2to3 new file mode 100644 index 00000000000..8a0c5653912 --- /dev/null +++ b/devel/py-DateTime/files/patch-2to3 @@ -0,0 +1,526 @@ +--- src/DateTime/DateTime.py.orig 2013-01-20 10:51:06 UTC ++++ src/DateTime/DateTime.py +@@ -11,7 +11,7 @@ + # + ############################################################################## + +-import copy_reg ++import copyreg + import math + import re + from time import altzone +@@ -23,14 +23,14 @@ from time import timezone + from time import tzname + from datetime import datetime + +-from pytz_support import PytzCache ++from .pytz_support import PytzCache + from zope.interface import implements + +-from interfaces import IDateTime +-from interfaces import DateTimeError +-from interfaces import SyntaxError +-from interfaces import DateError +-from interfaces import TimeError ++from .interfaces import IDateTime ++from .interfaces import DateTimeError ++from .interfaces import SyntaxError ++from .interfaces import DateError ++from .interfaces import TimeError + + default_datefmt = None + +@@ -65,7 +65,7 @@ to_year = int(i * 365 + i / 4 - i / 100 + i / 400 - 69 + to_month = tm[yr % 4 == 0 and (yr % 100 != 0 or yr % 400 == 0)][mo] + EPOCH = ((to_year + to_month + dy + + (hr / 24.0 + mn / 1440.0 + sc / 86400.0)) * 86400) +-jd1901 = 2415385L ++jd1901 = 2415385 + + _TZINFO = PytzCache() + +@@ -201,14 +201,14 @@ def _calcDependentSecond(tz, t): + # Calculates the timezone-dependent second (integer part only) + # from the timezone-independent second. 
+ fset = _tzoffset(tz, t) +- return fset + long(math.floor(t)) + long(EPOCH) - 86400L ++ return fset + int(math.floor(t)) + int(EPOCH) - 86400 + + + def _calcDependentSecond2(yr, mo, dy, hr, mn, sc): + # Calculates the timezone-dependent second (integer part only) + # from the date given. + ss = int(hr) * 3600 + int(mn) * 60 + int(sc) +- x = long(_julianday(yr, mo, dy) - jd1901) * 86400 + ss ++ x = int(_julianday(yr, mo, dy) - jd1901) * 86400 + ss + return x + + +@@ -216,14 +216,14 @@ def _calcIndependentSecondEtc(tz, x, ms): + # Derive the timezone-independent second from the timezone + # dependent second. + fsetAtEpoch = _tzoffset(tz, 0.0) +- nearTime = x - fsetAtEpoch - long(EPOCH) + 86400L + ms ++ nearTime = x - fsetAtEpoch - int(EPOCH) + 86400 + ms + # nearTime is now within an hour of being correct. + # Recalculate t according to DST. +- fset = long(_tzoffset(tz, nearTime)) ++ fset = int(_tzoffset(tz, nearTime)) + d = (x - fset) / 86400.0 + (ms / 86400.0) +- t = x - fset - long(EPOCH) + 86400L + ms ++ t = x - fset - int(EPOCH) + 86400 + ms + micros = (x + 86400 - fset) * 1000000 + \ +- long(round(ms * 1000000.0)) - long(EPOCH * 1000000.0) ++ int(round(ms * 1000000.0)) - int(EPOCH * 1000000.0) + s = d - math.floor(d) + return (s, d, t, micros) + +@@ -250,41 +250,41 @@ def _calcYMDHMS(x, ms): + + + def _julianday(yr, mo, dy): +- y, m, d = long(yr), long(mo), long(dy) +- if m > 12L: +- y = y + m / 12L +- m = m % 12L +- elif m < 1L: ++ y, m, d = int(yr), int(mo), int(dy) ++ if m > 12: ++ y = y + m / 12 ++ m = m % 12 ++ elif m < 1: + m = -m +- y = y - m / 12L - 1L +- m = 12L - m % 12L +- if y > 0L: +- yr_correct = 0L ++ y = y - m / 12 - 1 ++ m = 12 - m % 12 ++ if y > 0: ++ yr_correct = 0 + else: +- yr_correct = 3L +- if m < 3L: +- y, m = y - 1L, m + 12L +- if y * 10000L + m * 100L + d > 15821014L: +- b = 2L - y / 100L + y / 400L ++ yr_correct = 3 ++ if m < 3: ++ y, m = y - 1, m + 12 ++ if y * 10000 + m * 100 + d > 15821014: ++ b = 2 - y / 100 + y / 400 + else: +- 
b = 0L +- return ((1461L * y - yr_correct) / 4L + +- 306001L * (m + 1L) / 10000L + d + 1720994L + b) ++ b = 0 ++ return ((1461 * y - yr_correct) / 4 + ++ 306001 * (m + 1) / 10000 + d + 1720994 + b) + + + def _calendarday(j): +- j = long(j) +- if (j < 2299160L): +- b = j + 1525L ++ j = int(j) ++ if (j < 2299160): ++ b = j + 1525 + else: +- a = (4L * j - 7468861L) / 146097L +- b = j + 1526L + a - a / 4L +- c = (20L * b - 2442L) / 7305L +- d = 1461L * c / 4L +- e = 10000L * (b - d) / 306001L +- dy = int(b - d - 306001L * e / 10000L) +- mo = (e < 14L) and int(e - 1L) or int(e - 13L) +- yr = (mo > 2) and (c - 4716L) or (c - 4715L) ++ a = (4 * j - 7468861) / 146097 ++ b = j + 1526 + a - a / 4 ++ c = (20 * b - 2442) / 7305 ++ d = 1461 * c / 4 ++ e = 10000 * (b - d) / 306001 ++ dy = int(b - d - 306001 * e / 10000) ++ mo = (e < 14) and int(e - 1) or int(e - 13) ++ yr = (mo > 2) and (c - 4716) or (c - 4715) + return (int(yr), int(mo), int(dy)) + + +@@ -317,7 +317,7 @@ def safegmtime(t): + '''gmtime with a safety zone.''' + try: + t_int = int(t) +- if isinstance(t_int, long): ++ if isinstance(t_int, int): + raise OverflowError # Python 2.3 fix: int can return a long! + return gmtime(t_int) + except (ValueError, OverflowError): +@@ -329,7 +329,7 @@ def safelocaltime(t): + '''localtime with a safety zone.''' + try: + t_int = int(t) +- if isinstance(t_int, long): ++ if isinstance(t_int, int): + raise OverflowError # Python 2.3 fix: int can return a long! 
+ return localtime(t_int) + except (ValueError, OverflowError): +@@ -453,15 +453,15 @@ class DateTime(object): + def __setstate__(self, value): + if isinstance(value, tuple): + self._parse_args(value[0], value[2]) +- self._micros = long(value[0] * 1000000) ++ self._micros = int(value[0] * 1000000) + self._timezone_naive = value[1] + else: +- for k, v in value.items(): ++ for k, v in list(value.items()): + if k in self.__slots__: + setattr(self, k, v) + # BBB: support for very old DateTime pickles + if '_micros' not in value: +- self._micros = long(value['_t'] * 1000000) ++ self._micros = int(value['_t'] * 1000000) + if '_timezone_naive' not in value: + self._timezone_naive = False + +@@ -729,7 +729,7 @@ class DateTime(object): + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + +- elif (isinstance(arg, basestring) and ++ elif (isinstance(arg, str) and + arg.lower() in _TZINFO._zidx): + # Current time, to be displayed in specified timezone + t, tz = time(), _TZINFO._zmap[arg.lower()] +@@ -739,7 +739,7 @@ class DateTime(object): + x = _calcDependentSecond(tz, t) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + +- elif isinstance(arg, basestring): ++ elif isinstance(arg, str): + # Date/time string + + iso8601 = iso8601Match(arg.strip()) +@@ -780,7 +780,7 @@ class DateTime(object): + sc = sc + ms + + elif ac==2: +- if isinstance(args[1], basestring): ++ if isinstance(args[1], str): + # Seconds from epoch (gmt) and timezone + t, tz = args + ms = (t - math.floor(t)) +@@ -800,7 +800,7 @@ class DateTime(object): + x_float = d * 86400.0 + x_floor = math.floor(x_float) + ms = x_float - x_floor +- x = long(x_floor) ++ x = int(x_floor) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + else: +@@ -838,7 +838,7 @@ class DateTime(object): + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + +- self._dayoffset = int((_julianday(yr, 
mo, dy) + 2L) % 7) ++ self._dayoffset = int((_julianday(yr, mo, dy) + 2) % 7) + # Round to nearest microsecond in platform-independent way. You + # cannot rely on C sprintf (Python '%') formatting to round + # consistently; doing it ourselves ensures that all but truly +@@ -855,7 +855,7 @@ class DateTime(object): + # self._micros is the time since the epoch + # in long integer microseconds. + if microsecs is None: +- microsecs = long(math.floor(t * 1000000.0)) ++ microsecs = int(math.floor(t * 1000000.0)) + self._micros = microsecs + + def localZone(self, ltm=None): +@@ -875,7 +875,7 @@ class DateTime(object): + if not _multipleZones: + return _localzone0 + fsetAtEpoch = _tzoffset(_localzone0, 0.0) +- nearTime = x - fsetAtEpoch - long(EPOCH) + 86400L + ms ++ nearTime = x - fsetAtEpoch - int(EPOCH) + 86400 + ms + # nearTime is within an hour of being correct. + try: + ltm = safelocaltime(nearTime) +@@ -887,7 +887,7 @@ class DateTime(object): + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, 0) + yr = ((yr - 1970) % 28) + 1970 + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) +- nearTime = x - fsetAtEpoch - long(EPOCH) + 86400L + ms ++ nearTime = x - fsetAtEpoch - int(EPOCH) + 86400 + ms + + # nearTime might still be negative if we are east of Greenwich. + # But we can asume on 1969/12/31 were no timezone changes. +@@ -1242,7 +1242,7 @@ class DateTime(object): + long integer microseconds. + """ + if isinstance(t, float): +- return self._micros > long(t * 1000000) ++ return self._micros > int(t * 1000000) + try: + return self._micros > t._micros + except AttributeError: +@@ -1263,7 +1263,7 @@ class DateTime(object): + long integer microseconds. + """ + if isinstance(t, float): +- return self._micros >= long(t * 1000000) ++ return self._micros >= int(t * 1000000) + try: + return self._micros >= t._micros + except AttributeError: +@@ -1283,7 +1283,7 @@ class DateTime(object): + long integer microseconds. 
+ """ + if isinstance(t, float): +- return self._micros == long(t * 1000000) ++ return self._micros == int(t * 1000000) + try: + return self._micros == t._micros + except AttributeError: +@@ -1328,7 +1328,7 @@ class DateTime(object): + long integer microseconds. + """ + if isinstance(t, float): +- return self._micros < long(t * 1000000) ++ return self._micros < int(t * 1000000) + try: + return self._micros < t._micros + except AttributeError: +@@ -1348,7 +1348,7 @@ class DateTime(object): + long integer microseconds. + """ + if isinstance(t, float): +- return self._micros <= long(t * 1000000) ++ return self._micros <= int(t * 1000000) + try: + return self._micros <= t._micros + except AttributeError: +@@ -1543,13 +1543,13 @@ class DateTime(object): + # pass them to strftime and convert them back to unicode if necessary. + + format_is_unicode = False +- if isinstance(format, unicode): ++ if isinstance(format, str): + format = format.encode('utf-8') + format_is_unicode = True + ds = datetime(zself._year, zself._month, zself._day, zself._hour, + zself._minute, int(zself._nearsec), + microseconds).strftime(format) +- return format_is_unicode and unicode(ds, 'utf-8') or ds ++ return format_is_unicode and str(ds, 'utf-8') or ds + + # General formats from previous DateTime + def Date(self): +@@ -1737,7 +1737,7 @@ class DateTime(object): + omicros = round(o * 86400000000) + tmicros = self.micros() + omicros + t = tmicros / 1000000.0 +- d = (tmicros + long(EPOCH*1000000)) / 86400000000.0 ++ d = (tmicros + int(EPOCH*1000000)) / 86400000000.0 + s = d - math.floor(d) + ms = t - math.floor(t) + x = _calcDependentSecond(tz, t) +@@ -1789,7 +1789,7 @@ class DateTime(object): + + def __long__(self): + """Convert to a long-int number of seconds since the epoch (gmt).""" +- return long(self.micros() / 1000000) ++ return int(self.micros() / 1000000) + + def __float__(self): + """Convert to floating-point number of seconds since the epoch (gmt). 
+@@ -1917,7 +1917,7 @@ class DateTime(object): + # Provide the _dt_reconstructor function here, in case something + # accidentally creates a reference to this function + +-orig_reconstructor = copy_reg._reconstructor ++orig_reconstructor = copyreg._reconstructor + + + def _dt_reconstructor(cls, base, state): +--- src/DateTime/pytz_support.py.orig 2011-12-09 05:59:48 UTC ++++ src/DateTime/pytz_support.py +@@ -18,7 +18,7 @@ import pytz + import pytz.reference + from pytz.tzinfo import StaticTzInfo, memorized_timedelta + from datetime import datetime, timedelta +-from interfaces import DateTimeError ++from .interfaces import DateTimeError + + EPOCH = datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc) + +@@ -198,7 +198,7 @@ def _static_timezone_factory(data): + return cls() + + _numeric_timezones = dict((key, _static_timezone_factory(data)) +- for key, data in _numeric_timezone_data.items()) ++ for key, data in list(_numeric_timezone_data.items())) + + + class Timezone: +@@ -238,7 +238,7 @@ class PytzCache: + _zlst = pytz.common_timezones + _old_zlst # used by DateTime.TimeZones + _zmap = dict((name.lower(), name) for name in pytz.all_timezones) + _zmap.update(_old_zmap) # These must take priority +- _zidx = _zmap.keys() ++ _zidx = list(_zmap.keys()) + + def __getitem__(self, key): + name = self._zmap.get(key.lower(), key) # fallback to key +@@ -248,4 +248,4 @@ class PytzCache: + try: + return Timezone(_numeric_timezones[name]) + except KeyError: +- raise DateTimeError,'Unrecognized timezone: %s' % key ++ raise DateTimeError('Unrecognized timezone: %s' % key) +--- src/DateTime/tests/testDateTime.py.orig 2013-01-20 10:50:50 UTC ++++ src/DateTime/tests/testDateTime.py +@@ -12,7 +12,7 @@ + # + ############################################################################## + +-import cPickle ++import pickle + import math + import os + import time +@@ -22,7 +22,7 @@ from DateTime.DateTime import _findLocalTimeZoneName + from DateTime import DateTime + from datetime import 
date, datetime, tzinfo, timedelta + import pytz +-import legacy ++from . import legacy + + try: + __file__ +@@ -69,7 +69,7 @@ class DateTimeTests(unittest.TestCase): + def testBug1203(self): + # 01:59:60 occurred in old DateTime + dt = DateTime(7200, 'GMT') +- self.assert_(str(dt).find('60') < 0, dt) ++ self.assertTrue(str(dt).find('60') < 0, dt) + + def testDSTInEffect(self): + # Checks GMT offset for a DST date in the US/Eastern time zone +@@ -116,7 +116,7 @@ class DateTimeTests(unittest.TestCase): + dt = DateTime() + dt1 = DateTime(float(dt), dt.timezone()) + self.assertEqual(str(dt), str(dt1), (dt, dt1)) +- dt1 = DateTime(float(dt), unicode(dt.timezone())) ++ dt1 = DateTime(float(dt), str(dt.timezone())) + self.assertEqual(str(dt), str(dt1), (dt, dt1)) + + def testConstructor6(self): +@@ -154,7 +154,7 @@ class DateTimeTests(unittest.TestCase): + # Fails when an 1800 date is displayed with negative signs + dt = DateTime('1830/5/6 12:31:46.213 pm') + dt1 = dt.toZone('GMT+6') +- self.assert_(str(dt1).find('-') < 0, (dt, dt1)) ++ self.assertTrue(str(dt1).find('-') < 0, (dt, dt1)) + + def testSubtraction(self): + # Reconstruction of a DateTime from its parts, with subtraction +@@ -212,22 +212,22 @@ class DateTimeTests(unittest.TestCase): + + def test_pickle(self): + dt = DateTime() +- data = cPickle.dumps(dt, 1) +- new = cPickle.loads(data) ++ data = pickle.dumps(dt, 1) ++ new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_tz(self): + dt = DateTime('2002/5/2 8:00am GMT+8') +- data = cPickle.dumps(dt, 1) +- new = cPickle.loads(data) ++ data = pickle.dumps(dt, 1) ++ new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_micros(self): + dt = DateTime('2002/5/2 8:00:14.123 GMT+8') +- data = cPickle.dumps(dt, 1) +- new = cPickle.loads(data) ++ data = pickle.dumps(dt, 1) ++ new = pickle.loads(data) + 
for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + +@@ -245,7 +245,7 @@ class DateTimeTests(unittest.TestCase): + '\x1bM\xd2\x07U\x08_nearsecq\x1cG\x00\x00\x00\x00\x00\x00\x00' + '\x00U\x07_pmhourq\x1dK\x08U\n_dayoffsetq\x1eK\x04U\x04timeq' + '\x1fG?\xd5UUUV\x00\x00ub.') +- new = cPickle.loads(data) ++ new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + +@@ -262,7 +262,7 @@ class DateTimeTests(unittest.TestCase): + '\x04_dayq\x19K\x02U\x05_yearq\x1aM\xd2\x07U\x08_nearsecq' + '\x1bG\x00\x00\x00\x00\x00\x00\x00\x00U\x07_pmhourq\x1cK\x08U' + '\n_dayoffsetq\x1dK\x04U\x04timeq\x1eG?\xd5UUUV\x00\x00ub.') +- new = cPickle.loads(data) ++ new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + +@@ -288,7 +288,7 @@ class DateTimeTests(unittest.TestCase): + dsec = (dt.millis() - dt1.millis()) / 1000.0 + ddays = math.floor((dsec / 86400.0) + 0.5) + +- self.assertEqual(ddays, 3000000L, ddays) ++ self.assertEqual(ddays, 3000000, ddays) + + def test_tzoffset(self): + # Test time-zone given as an offset +@@ -523,7 +523,7 @@ class DateTimeTests(unittest.TestCase): + + def test_calcTimezoneName(self): + from DateTime.interfaces import TimeError +- timezone_dependent_epoch = 2177452800L ++ timezone_dependent_epoch = 2177452800 + try: + DateTime()._calcTimezoneName(timezone_dependent_epoch, 0) + except TimeError: +@@ -556,8 +556,8 @@ class DateTimeTests(unittest.TestCase): + + def testStrftimeUnicode(self): + dt = DateTime('2002-05-02T08:00:00+00:00') +- ok = dt.strftime('Le %d/%m/%Y a %Hh%M').replace('a', u'\xe0') +- self.assertEqual(dt.strftime(u'Le %d/%m/%Y \xe0 %Hh%M'), ok) ++ ok = dt.strftime('Le %d/%m/%Y a %Hh%M').replace('a', '\xe0') ++ self.assertEqual(dt.strftime('Le %d/%m/%Y \xe0 %Hh%M'), ok) + + def testTimezoneNaiveHandling(self): + # checks that we assign timezone naivity correctly +@@ -615,11 +615,11 @@ class 
DateTimeTests(unittest.TestCase): + t1 = time.mktime(datetime(2002, 1, 1).timetuple()) + t2 = time.mktime(datetime(2002, 7, 1).timetuple()) + +- for name in legacy._zlst + legacy._zmap.keys() + legacy._data.keys(): +- self.failUnless(name.lower() in _TZINFO._zidx, 'legacy timezone %s cannot be looked up' % name) ++ for name in legacy._zlst + list(legacy._zmap.keys()) + list(legacy._data.keys()): ++ self.assertTrue(name.lower() in _TZINFO._zidx, 'legacy timezone %s cannot be looked up' % name) + + failures = [] +- for name, zone in legacy.timezones.iteritems(): ++ for name, zone in legacy.timezones.items(): + newzone = _TZINFO[name] + # The name of the new zone might change (eg GMT+6 rather than GMT+0600) + if zone.info(t1)[:2] != newzone.info(t1)[:2] or zone.info(t2)[:2] != newzone.info(t2)[:2]: +@@ -637,7 +637,7 @@ class DateTimeTests(unittest.TestCase): + + real_failures = list(set(failures).difference(set(expected_failures))) + +- self.failIf(real_failures, '\n'.join(real_failures)) ++ self.assertFalse(real_failures, '\n'.join(real_failures)) + + def testBasicTZ(self): + #psycopg2 supplies it's own tzinfo instances, with no `zone` attribute diff --git a/devel/py-Products.ExternalEditor/files/patch-2to3 b/devel/py-Products.ExternalEditor/files/patch-2to3 new file mode 100644 index 00000000000..55bdf96341c --- /dev/null +++ b/devel/py-Products.ExternalEditor/files/patch-2to3 @@ -0,0 +1,64 @@ +--- Products/ExternalEditor/ExternalEditor.py.orig 2010-12-03 03:58:58 UTC ++++ Products/ExternalEditor/ExternalEditor.py +@@ -18,7 +18,7 @@ + + from string import join # For Zope 2.3 compatibility + import types +-import urllib ++import urllib.request, urllib.parse, urllib.error + from Acquisition import aq_inner, aq_base, aq_parent, Implicit + try: + from App.class_init import InitializeClass +@@ -55,11 +55,11 @@ class PDataStreamIterator: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + if self.data is None: +- raise StopIteration ++ 
raise(StopIteration) + data = self.data.data +- self.data = self.data.next ++ self.data = self.data.__next__ + return data + + def registerCallback(cb): +@@ -125,8 +125,8 @@ class ExternalEditor(Implicit): + if title is not None: + if callable(title): + title = title() +- if isinstance(title, types.UnicodeType): +- title = unicode.encode(title, 'utf-8') ++ if isinstance(title, str): ++ title = str.encode(title, 'utf-8') + r.append('title:%s' % title) + + if hasattr(aq_base(ob), 'content_type'): +@@ -214,7 +214,7 @@ class ExternalEditor(Implicit): + body = ob.read() + else: + # can't read it! +- raise 'BadRequest', 'Object does not support external editing' ++ raise BadRequest('Object does not support external editing') + + if (HAVE_Z3_IFACE and IStreamIterator.providedBy(body) or + (not HAVE_Z3_IFACE) and IStreamIterator.isImplementedBy(body)): +@@ -287,7 +287,7 @@ def EditLink(self, object, borrow_lock=0, skip_data=0) + if skip_data: + query['skip_data'] = 1 + url = "%s/externalEdit_/%s%s%s" % (aq_parent(aq_inner(object)).absolute_url(), +- urllib.quote(object.getId()), ++ urllib.parse.quote(object.getId()), + ext, querystr(query)) + return ('' +@@ -302,7 +302,7 @@ def querystr(d): + """Create a query string from a dict""" + if d: + return '?' 
+ '&'.join( +- ['%s=%s' % (name, val) for name, val in d.items()]) ++ ['%s=%s' % (name, val) for name, val in list(d.items())]) + else: + return '' + diff --git a/devel/py-Products.PloneLanguageTool/Makefile b/devel/py-Products.PloneLanguageTool/Makefile index ac545369e1b..0ff2f9f0960 100644 --- a/devel/py-Products.PloneLanguageTool/Makefile +++ b/devel/py-Products.PloneLanguageTool/Makefile @@ -1,6 +1,6 @@ PORTNAME= Products.PloneLanguageTool PORTVERSION= 3.2.7 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel python zope MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -15,4 +15,7 @@ NO_ARCH= yes USES= python:3.6+ zip USE_PYTHON= autoplist distutils +post-patch: + @${RM} ${WRKSRC}/Products/PloneLanguageTool/skins/LanguageTool/isTranslatable.py + .include diff --git a/devel/py-aiosignal/Makefile b/devel/py-aiosignal/Makefile new file mode 100644 index 00000000000..1b3a6d73abb --- /dev/null +++ b/devel/py-aiosignal/Makefile @@ -0,0 +1,22 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= aiosignal +PORTVERSION= 1.2.0 +CATEGORIES= devel python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= List of registered asynchronous callbacks + +LICENSE= APACHE20 +LICENSE_FILE= ${WRKSRC}/LICENSE + +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}frozenlist>=1.1.0:devel/py-frozenlist@${PY_FLAVOR} + +USES= python:3.6+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +.include diff --git a/devel/py-aiosignal/distinfo b/devel/py-aiosignal/distinfo new file mode 100644 index 00000000000..f155499706a --- /dev/null +++ b/devel/py-aiosignal/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264400 +SHA256 (aiosignal-1.2.0.tar.gz) = 78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2 +SIZE (aiosignal-1.2.0.tar.gz) = 19692 diff --git a/devel/py-aiosignal/pkg-descr b/devel/py-aiosignal/pkg-descr new file mode 100644 index 00000000000..12bb231b16e --- /dev/null +++ b/devel/py-aiosignal/pkg-descr @@ -0,0 
+1,12 @@ +Signal is a list of registered asynchronous callbacks. + +The signal's life-cycle has two stages: after creation its content could be +filled by using standard list operations: sig.append() etc. + +After you call sig.freeze() the signal is frozen: adding, removing and dropping +callbacks is forbidden. + +The only available operation is calling the previously registered callbacks by +using await sig.send(data). + +WWW: https://github.com/aio-libs/aiosignal diff --git a/devel/py-anyjson/files/patch-2to3 b/devel/py-anyjson/files/patch-2to3 new file mode 100644 index 00000000000..081cab6becf --- /dev/null +++ b/devel/py-anyjson/files/patch-2to3 @@ -0,0 +1,44 @@ +--- anyjson/__init__.py.orig 2012-06-21 23:08:51 UTC ++++ anyjson/__init__.py +@@ -85,8 +85,8 @@ class _JsonImplementation(object): + TypeError if the object could not be serialized.""" + try: + return self._encode(data) +- except self._encode_error, exc: +- raise TypeError, TypeError(*exc.args), sys.exc_info()[2] ++ except self._encode_error as exc: ++ raise TypeError(TypeError(*exc.args)).with_traceback(sys.exc_info()[2]) + serialize = dumps + + def loads(self, s): +@@ -97,8 +97,8 @@ class _JsonImplementation(object): + if self._filedecode and not isinstance(s, basestring): + return self._filedecode(StringIO(s)) + return self._decode(s) +- except self._decode_error, exc: +- raise ValueError, ValueError(*exc.args), sys.exc_info()[2] ++ except self._decode_error as exc: ++ raise ValueError(ValueError(*exc.args)).with_traceback(sys.exc_info()[2]) + deserialize = loads + + +@@ -117,7 +117,7 @@ if __name__ == "__main__": + # We do NOT try to load a compatible module because that may throw an + # exception, which renders the package uninstallable with easy_install + # (It trys to execfile the script when installing, to make sure it works) +- print "Running anyjson as a stand alone script is not supported" ++ print("Running anyjson as a stand alone script is not supported") + sys.exit(1) + else: + for 
modspec in _modules: +--- setup.py.orig 2012-06-21 22:59:59 UTC ++++ setup.py +@@ -2,8 +2,6 @@ import os + import sys + + extra = {} +-if sys.version_info >= (3, 0): +- extra.update(use_2to3=True) + + try: + from setuptools import setup, find_packages diff --git a/devel/py-apache_conf_parser/files/patch-indent b/devel/py-apache_conf_parser/files/patch-indent new file mode 100644 index 00000000000..8f9fe46a78a --- /dev/null +++ b/devel/py-apache_conf_parser/files/patch-indent @@ -0,0 +1,13 @@ +--- apache_conf_parser.py.orig 2011-04-12 03:06:39 UTC ++++ apache_conf_parser.py +@@ -466,8 +466,8 @@ class ApacheConfParser(ComplexNode): + def __init__(self, source, infile=True, delay=False, count=None): + """Count is the starting number for line counting...""" + super(ApacheConfParser, self).__init__(_NODES) +- self.source = source.splitlines() +- if infile: ++ self.source = source.splitlines() ++ if infile: + self.source = (line.strip("\n") for line in open(source)) + self.count = count + if not delay: diff --git a/devel/py-astroid/Makefile b/devel/py-astroid/Makefile index 9cd8344575e..5e430a292e9 100644 --- a/devel/py-astroid/Makefile +++ b/devel/py-astroid/Makefile @@ -1,5 +1,5 @@ PORTNAME= astroid -PORTVERSION= 2.9.3 +PORTVERSION= 2.11.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -11,7 +11,7 @@ LICENSE= LGPL21+ LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}lazy-object-proxy>=1.4.0:devel/py-lazy-object-proxy@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}wrapt>=1.11<1.14:devel/py-wrapt@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}wrapt>=1.11<2:devel/py-wrapt@${PY_FLAVOR} TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0:devel/py-pytest@${PY_FLAVOR} USES= python:3.6+ diff --git a/devel/py-astroid/distinfo b/devel/py-astroid/distinfo index ad1368fb990..93797b8126c 100644 --- a/devel/py-astroid/distinfo +++ b/devel/py-astroid/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133733 -SHA256 (astroid-2.9.3.tar.gz) = 
1efdf4e867d4d8ba4a9f6cf9ce07cd182c4c41de77f23814feb27ca93ca9d877 -SIZE (astroid-2.9.3.tar.gz) = 207480 +TIMESTAMP = 1647264510 +SHA256 (astroid-2.11.0.tar.gz) = d56f6a5239981567ceaa3413d4971936c7b4b0c796fcff90c24254dcf8b05b62 +SIZE (astroid-2.11.0.tar.gz) = 205275 diff --git a/devel/py-asyncio/Makefile b/devel/py-asyncio/Makefile deleted file mode 100644 index 1f31cbb4533..00000000000 --- a/devel/py-asyncio/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -# Created by: Kubilay Kocak - -PORTNAME= asyncio -PORTVERSION= 3.4.3 -CATEGORIES= devel python -MASTER_SITES= CHEESESHOP -PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} - -MAINTAINER= sunpoet@FreeBSD.org -COMMENT= Reference implementation of PEP 3156 (Asynchronous IO) - -LICENSE= APACHE20 -LICENSE_FILE= ${WRKSRC}/COPYING - -USES= python:3.3+ -USE_PYTHON= autoplist distutils -NO_ARCH= yes - -do-test: - @cd ${WRKSRC} && ${PYTHON_CMD} ${PYDISTUTILS_SETUP} test - -.include diff --git a/devel/py-asyncio/distinfo b/devel/py-asyncio/distinfo deleted file mode 100644 index b5ae8d3d726..00000000000 --- a/devel/py-asyncio/distinfo +++ /dev/null @@ -1,2 +0,0 @@ -SHA256 (asyncio-3.4.3.tar.gz) = 83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41 -SIZE (asyncio-3.4.3.tar.gz) = 204411 diff --git a/devel/py-asyncio/pkg-descr b/devel/py-asyncio/pkg-descr deleted file mode 100644 index dc5e82c34b4..00000000000 --- a/devel/py-asyncio/pkg-descr +++ /dev/null @@ -1,3 +0,0 @@ -"Tulip", the reference implementation of PEP 3156: Asynchronous IO Support. 
- -WWW: https://github.com/python/asyncio diff --git a/devel/py-azure-core/Makefile b/devel/py-azure-core/Makefile index 44e32908480..d3aa26e8f22 100644 --- a/devel/py-azure-core/Makefile +++ b/devel/py-azure-core/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= azure-core -PORTVERSION= 1.22.1 +PORTVERSION= 1.23.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,7 +12,8 @@ COMMENT= Microsoft Azure Core Library for Python LICENSE= MIT RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}requests>=2.18.4:www/py-requests@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}six>=1.11.0:devel/py-six@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}six>=1.11.0:devel/py-six@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}typing-extensions>=4.0.1:devel/py-typing-extensions@${PY_FLAVOR} USES= python:3.6+ zip USE_PYTHON= autoplist concurrent distutils diff --git a/devel/py-azure-core/distinfo b/devel/py-azure-core/distinfo index be7de756b24..be47245b3b2 100644 --- a/devel/py-azure-core/distinfo +++ b/devel/py-azure-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057930 -SHA256 (azure-core-1.22.1.zip) = 4b6e405268a33b873107796495cec3f2f1b1ffe935624ce0fbddff36d38d3a4d -SIZE (azure-core-1.22.1.zip) = 368450 +TIMESTAMP = 1647264512 +SHA256 (azure-core-1.23.0.zip) = a56a6f720d0948d3f3e4a25a5fe46df2f1b7f865c358d74e2ce47dbb49262608 +SIZE (azure-core-1.23.0.zip) = 371683 diff --git a/devel/py-behave/files/patch-setup.py b/devel/py-behave/files/patch-setup.py new file mode 100644 index 00000000000..04722791c1e --- /dev/null +++ b/devel/py-behave/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2018-02-25 16:27:09 UTC ++++ setup.py +@@ -100,8 +100,6 @@ setup( + "pylint", + ], + }, +- # MAYBE-DISABLE: use_2to3 +- use_2to3= bool(python_version >= 3.0), + license="BSD", + classifiers=[ + "Development Status :: 4 - Beta", diff --git a/devel/py-cachy/Makefile b/devel/py-cachy/Makefile index a39a2aca4de..fd569924ea6 100644 --- a/devel/py-cachy/Makefile +++ 
b/devel/py-cachy/Makefile @@ -2,6 +2,7 @@ PORTNAME= cachy PORTVERSION= 0.3.0 +PORTREVISION= 1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-cachy/files/patch-setup.py b/devel/py-cachy/files/patch-setup.py index 97616bb8ce3..2e90428514f 100644 --- a/devel/py-cachy/files/patch-setup.py +++ b/devel/py-cachy/files/patch-setup.py @@ -1,6 +1,16 @@ --- setup.py.orig 1970-01-01 00:00:00 UTC +++ setup.py -@@ -14,7 +14,7 @@ package_data = \ +@@ -5,16 +5,14 @@ packages = \ + ['cachy', + 'cachy.contracts', + 'cachy.serializers', +- 'cachy.stores', +- 'tests', +- 'tests.stores'] ++ 'cachy.stores'] + + package_data = \ + {'': ['*']} extras_require = \ {'memcached': ['python-memcached>=1.59,<2.0'], diff --git a/devel/py-canonicaljson/Makefile b/devel/py-canonicaljson/Makefile index 9eda08e8eca..d79567401cd 100644 --- a/devel/py-canonicaljson/Makefile +++ b/devel/py-canonicaljson/Makefile @@ -1,7 +1,7 @@ # Created by: Brendan Molloy PORTNAME= canonicaljson -PORTVERSION= 1.5.0 +PORTVERSION= 1.6.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,8 +12,7 @@ COMMENT= Canonical JSON LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}frozendict>=1.0:devel/py-frozendict@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}simplejson>=3.14.0:devel/py-simplejson@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}simplejson>=3.14.0:devel/py-simplejson@${PY_FLAVOR} TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0:devel/py-pytest@${PY_FLAVOR} USES= python:3.5+ @@ -21,7 +20,12 @@ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +OPTIONS_DEFINE= FROZENDICT +FROZENDICT_DESC= Frozendict immutable dictionaries support + +FROZENDICT_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}frozendict>=1.0:devel/py-frozendict@${PY_FLAVOR} + do-test: - cd ${WRKSRC} && ${PYTHON_CMD} -m pytest + cd ${WRKSRC} && ${PYTHON_CMD} -m pytest -rs -v .include diff --git 
a/devel/py-canonicaljson/distinfo b/devel/py-canonicaljson/distinfo index c6fe85ea5ae..66af84f4ec8 100644 --- a/devel/py-canonicaljson/distinfo +++ b/devel/py-canonicaljson/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1635181041 -SHA256 (canonicaljson-1.5.0.tar.gz) = 5ebddcd74c5b066ee68ecca56465338e9b1380467e0a906f4797697c9fb381e2 -SIZE (canonicaljson-1.5.0.tar.gz) = 9991 +TIMESTAMP = 1647264514 +SHA256 (canonicaljson-1.6.0.tar.gz) = 8739d5fd91aca7281d425660ae65af7663808c8177778965f67e90b16a2b2427 +SIZE (canonicaljson-1.6.0.tar.gz) = 9959 diff --git a/devel/py-cdg/files/patch-2to3 b/devel/py-cdg/files/patch-2to3 new file mode 100644 index 00000000000..5210191bc20 --- /dev/null +++ b/devel/py-cdg/files/patch-2to3 @@ -0,0 +1,16 @@ +--- cdg/__init__.py.orig 2017-07-20 18:17:28 UTC ++++ cdg/__init__.py +@@ -31,11 +31,11 @@ def load(stream, filename): + cg = yaml.load(stream, Loader=Loader) + + else: +- raise ValueError, 'Unhandled file type: %s' % filename ++ raise ValueError('Unhandled file type: %s' % filename) + + graph = networkx.DiGraph(comment='Callgraph of %s' % filename) + +- for (name, props) in cg['functions'].items(): ++ for (name, props) in list(cg['functions'].items()): + graph.add_node(name) + + for (k, v) in props['attributes'] if 'attributes' in props else []: diff --git a/devel/py-cliff/Makefile b/devel/py-cliff/Makefile index 757108725ed..d7f19b74253 100644 --- a/devel/py-cliff/Makefile +++ b/devel/py-cliff/Makefile @@ -1,7 +1,7 @@ # Created by: Alexander Nusov PORTNAME= cliff -PORTVERSION= 3.10.0 +PORTVERSION= 3.10.1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-cliff/distinfo b/devel/py-cliff/distinfo index 3026a4d2de1..6a8b70bc4fa 100644 --- a/devel/py-cliff/distinfo +++ b/devel/py-cliff/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643998572 -SHA256 (cliff-3.10.0.tar.gz) = c68aac08d0d25853234a38fdbf1f33503849af3d5d677a4d0aacd42b0be6a4a1 -SIZE (cliff-3.10.0.tar.gz) = 82636 +TIMESTAMP = 1647264516 
+SHA256 (cliff-3.10.1.tar.gz) = 045aee3f3c64471965d7ad507ce8474a4e2f20815fbb5405a770f8596a2a00a0 +SIZE (cliff-3.10.1.tar.gz) = 82764 diff --git a/devel/py-cloudpickle/Makefile b/devel/py-cloudpickle/Makefile index a3eb407810f..8e0c5ec5390 100644 --- a/devel/py-cloudpickle/Makefile +++ b/devel/py-cloudpickle/Makefile @@ -1,7 +1,7 @@ # Created by: Kubilay Kocak PORTNAME= cloudpickle -PORTVERSION= 1.3.0 +PORTVERSION= 2.0.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,16 +12,18 @@ COMMENT= Extended pickling support for Python objects LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE -TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>0:devel/py-pytest@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}psutil>0:sysutils/py-psutil@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}numpy>=0,1:math/py-numpy@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}psutil>=0:sysutils/py-psutil@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pytest>=0:devel/py-pytest@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}tornado>=0:www/py-tornado@${PY_FLAVOR} USES= python:3.6+ -USE_PYTHON= autoplist distutils +USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes -# Need -s for https://github.com/cloudpipe/cloudpickle/issues/252 do-test: - @cd ${WRKSRC} && ${PYTHON_CMD} -m pytest -v -rs -s +# Need -s for https://github.com/cloudpipe/cloudpickle/issues/252 + cd ${WRKSRC} && ${SETENV} PYTHONPATH=tests/cloudpickle_testpkg ${PYTHON_CMD} -m pytest -rs -s -v .include diff --git a/devel/py-cloudpickle/distinfo b/devel/py-cloudpickle/distinfo index ec3ee34f7f8..1716f9bf0f1 100644 --- a/devel/py-cloudpickle/distinfo +++ b/devel/py-cloudpickle/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1596283802 -SHA256 (cloudpickle-1.3.0.tar.gz) = 38af54d0e7705d87a287bdefe1df00f936aadb1f629dca383e825cca927fa753 -SIZE (cloudpickle-1.3.0.tar.gz) = 46220 +TIMESTAMP = 1647264518 +SHA256 (cloudpickle-2.0.0.tar.gz) = 5cd02f3b417a783ba84a4ec3e290ff7929009fe51f6405423cfccfadd43ba4a4 +SIZE (cloudpickle-2.0.0.tar.gz) 
= 60038 diff --git a/devel/py-cloudpickle/pkg-descr b/devel/py-cloudpickle/pkg-descr index 356d4f0e94e..c512bf67a3d 100644 --- a/devel/py-cloudpickle/pkg-descr +++ b/devel/py-cloudpickle/pkg-descr @@ -1,12 +1,14 @@ -cloudpickle makes it possible to serialize Python constructs not -supported by the default pickle module from the Python standard library. +cloudpickle makes it possible to serialize Python constructs not supported by +the default pickle module from the Python standard library. -cloudpickle is especially useful for cluster computing where Python -expressions are shipped over the network to execute on remote hosts, -possibly close to the data. +cloudpickle is especially useful for cluster computing where Python code is +shipped over the network to execute on remote hosts, possibly close to the data. -Among other things, cloudpickle supports pickling for lambda -expressions, functions and classes defined interactively in the -`__main__` module. +Among other things, cloudpickle supports pickling for lambda functions along +with functions and classes defined interactively in the __main__ module (for +instance in a script, a shell or a Jupyter notebook). + +Cloudpickle can only be used to send objects between the exact same version of +Python. 
WWW: https://github.com/cloudpipe/cloudpickle diff --git a/devel/py-d2to1/Makefile b/devel/py-d2to1/Makefile index 1299ab2c7e7..e95eb9b3270 100644 --- a/devel/py-d2to1/Makefile +++ b/devel/py-d2to1/Makefile @@ -17,6 +17,8 @@ USES= python:3.6+ USE_GITHUB= yes USE_PYTHON= distutils autoplist +NO_ARCH= yes + GH_ACCOUNT= embray do-test: diff --git a/devel/py-daemon-runner/files/patch-2to3 b/devel/py-daemon-runner/files/patch-2to3 new file mode 100644 index 00000000000..210ddfa2f2f --- /dev/null +++ b/devel/py-daemon-runner/files/patch-2to3 @@ -0,0 +1,11 @@ +--- daemon_runner.py.orig 2014-04-28 21:03:22 UTC ++++ daemon_runner.py +@@ -68,7 +68,7 @@ def watch_process(args, pid_file=None): + + try: + pidfile = acquire_pidfile_lock(pid_file) +- except Exception, e: ++ except Exception as e: + sys.stderr.write("Couldn't acquire pidfile lock {0}, owned by {1} ({2})\n".format(pid_file, get_pid(pid_file), e)) + sys.exit(1) + diff --git a/devel/py-dal/files/patch-2to3 b/devel/py-dal/files/patch-2to3 new file mode 100644 index 00000000000..fa087e1eeac --- /dev/null +++ b/devel/py-dal/files/patch-2to3 @@ -0,0 +1,604 @@ +--- dal/dbapi/db_row.py.orig 2008-07-25 06:05:12 UTC ++++ dal/dbapi/db_row.py +@@ -275,11 +275,11 @@ class MetaFields(type): + + for f in fields: + if type(f) is not str: +- raise TypeError, 'Field names must be ASCII strings' ++ raise TypeError('Field names must be ASCII strings') + if not f: +- raise ValueError, 'Field names cannot be empty' ++ raise ValueError('Field names cannot be empty') + if f in field_names: +- raise ValueError, 'Field names must be unique: %s' % f ++ raise ValueError('Field names must be unique: %s' % f) + + slots.append(f) + field_names[f] = 1 +@@ -313,7 +313,7 @@ class IMetaFields(MetaFields): + try: + ifields = tuple( [ f.lower() for f in fields ] ) + except AttributeError: +- raise TypeError, 'Field names must be ASCII strings' ++ raise TypeError('Field names must be ASCII strings') + + super(IMetaFields,cls).build_properties(cls, 
ifields, field_dict) + field_dict['__fields__'] = tuple(fields) +@@ -459,7 +459,7 @@ except ImportError: + super(IFieldsBase, self).__setattr__(key.lower(),None) + + +-class Fields(FieldsBase): ++class Fields(FieldsBase, metaclass=MetaFields): + '''Fields: + + A tuple-like base-class that gains properties to allow access to +@@ -469,12 +469,10 @@ class Fields(FieldsBase): + is case-sensitive, though case-insensitive objects may be created by + inheriting from the IFields base-class. + ''' +- +- __metaclass__ = MetaFields + __slots__ = () + + +-class IFields(IFieldsBase): ++class IFields(IFieldsBase, metaclass=IMetaFields): + '''IFields: + + A tuple-like base-class that gains properties to allow access to +@@ -484,8 +482,6 @@ class IFields(IFieldsBase): + is case-insensitive, though case-sensitive objects may be created by + inheriting from the Fields base-class. + ''' +- +- __metaclass__ = IMetaFields + __slots__ = () + + +@@ -513,7 +509,7 @@ except ImportError: + try: + return getattr(self.fields,key) + except AttributeError: +- raise KeyError,key ++ raise KeyError(key) + return self.fields.__getitem__(key) + + def __setitem__(self, key, value): +@@ -521,7 +517,7 @@ except ImportError: + try: + setattr(self.fields,key,value) + except AttributeError: +- raise KeyError,key ++ raise KeyError(key) + else: + self.fields.__setitem__(key,value) + +@@ -530,7 +526,7 @@ except ImportError: + try: + delattr(self.fields,key) + except AttributeError: +- raise KeyError,key ++ raise KeyError(key) + else: + self.fields.__delitem__(key) + +@@ -544,7 +540,7 @@ except ImportError: + self.fields.__delslice__(i, j) + + def __hash__(self): +- raise NotImplementedError,'Row objects are not hashable' ++ raise NotImplementedError('Row objects are not hashable') + + def __len__(self): + return len(self.fields) +@@ -622,7 +618,7 @@ class Row(RowBase): + + def items(self): + '''r.items() -> tuple of r's (field, value) pairs, as 2-tuples''' +- return zip(self.keys(),self.fields) ++ return 
list(zip(list(self.keys()),self.fields)) + + def get(self, key, default=None): + if not isinstance(key, str): +@@ -638,14 +634,14 @@ class Row(RowBase): + + def dict(self): + '''r.dict() -> dictionary mapping r's fields to its values''' +- return dict(self.items()) ++ return dict(list(self.items())) + + def copy(self): + '''r.copy() -> a shallow copy of r''' + return type(self)(self) + + def __hash__(self): +- raise NotImplementedError,'Row objects are not hashable' ++ raise NotImplementedError('Row objects are not hashable') + + + class IRow(Row): +@@ -662,7 +658,7 @@ class IRow(Row): + def has_key(self, key): + if isinstance(key, str): + key = key.lower() +- return super(IRow, self).has_key(key) ++ return key in super(IRow, self) + + + class MetaRowBase(type): +@@ -789,7 +785,7 @@ class NullRow(type(Nothing)): + return 0 + def __ne__(self, other): + return 1 +- def __nonzero__(self): ++ def __bool__(self): + return 0 + + +@@ -802,21 +798,21 @@ def test(cls): + assert d['c']==d[2]==d.fields.c==d.fields[2]==3 + + assert len(d) == 3 +- assert d.has_key('a') +- assert d.has_key('B') +- assert d.has_key('c') ++ assert 'a' in d ++ assert 'B' in d ++ assert 'c' in d + assert 'd' not in d + assert 1 in d + assert 2 in d + assert 3 in d + assert 4 not in d +- assert not d.has_key(4) +- assert not d.has_key('d') ++ assert 4 not in d ++ assert 'd' not in d + assert d[-1] == 3 + assert d[1:3] == (2,3) + +- assert d.keys() == ('a','B','c') +- assert d.items() == [('a', 1), ('B', 2), ('c', 3)] ++ assert list(d.keys()) == ('a','B','c') ++ assert list(d.items()) == [('a', 1), ('B', 2), ('c', 3)] + assert d.dict() == {'a': 1, 'c': 3, 'B': 2} + assert d.copy() == d + assert d == d.copy() +@@ -849,19 +845,19 @@ def test(cls): + + try: + d[4] +- raise AssertionError, 'Illegal index not caught' ++ raise AssertionError('Illegal index not caught') + except IndexError: + pass + + try: + d['f'] +- raise AssertionError, 'Illegal key not caught' ++ raise AssertionError('Illegal key not 
caught') + except KeyError: + pass + + try: + d.fields.f +- raise AssertionError, 'Illegal attribute not caught' ++ raise AssertionError('Illegal attribute not caught') + except AttributeError: + pass + +@@ -874,14 +870,14 @@ def test_insensitive(cls): + assert d['b']==d['B']==d[1]==d.fields.B==d.fields.b==d.fields[1]==2 + assert d['c']==d['C']==d[2]==d.fields.C==d.fields.c==d.fields[2]==3 + +- assert d.has_key('a') +- assert d.has_key('A') +- assert d.has_key('b') +- assert d.has_key('B') +- assert d.has_key('c') +- assert d.has_key('C') +- assert not d.has_key('d') +- assert not d.has_key('D') ++ assert 'a' in d ++ assert 'A' in d ++ assert 'b' in d ++ assert 'B' in d ++ assert 'c' in d ++ assert 'C' in d ++ assert 'd' not in d ++ assert 'D' not in d + + assert 1 in d + assert 2 in d +@@ -975,37 +971,37 @@ def test_rw(cls): + + try: + d['g'] = 'illegal' +- raise AssertionError,'Illegal setitem' ++ raise AssertionError('Illegal setitem') + except KeyError: + pass + + try: + del d['g'] +- raise AssertionError,'Illegal delitem' ++ raise AssertionError('Illegal delitem') + except KeyError: + pass + + try: + d[5] = 'illegal' +- raise AssertionError,'Illegal setitem' ++ raise AssertionError('Illegal setitem') + except IndexError: + pass + + try: + del d[5] +- raise AssertionError,'Illegal delitem' ++ raise AssertionError('Illegal delitem') + except IndexError: + pass + + try: + d.fields.g = 'illegal' +- raise AssertionError,'Illegal setattr' ++ raise AssertionError('Illegal setattr') + except AttributeError: + pass + + try: + del d.fields.g +- raise AssertionError,'Illegal delattr' ++ raise AssertionError('Illegal delattr') + except AttributeError: + pass + +@@ -1066,25 +1062,25 @@ def test_incomplete(cls): + + try: + d['B'] +- raise AssertionError,'Illegal getitem: "%s"' % d['B'] ++ raise AssertionError('Illegal getitem: "%s"' % d['B']) + except KeyError: + pass + + try: + d['c'] +- raise AssertionError,'Illegal getitem' ++ raise AssertionError('Illegal getitem') + 
except KeyError: + pass + + try: + d.fields.b +- raise AssertionError,'Illegal getattr' ++ raise AssertionError('Illegal getattr') + except AttributeError: + pass + + try: + d.fields.c +- raise AssertionError,'Illegal getattr' ++ raise AssertionError('Illegal getattr') + except AttributeError: + pass + +@@ -1121,12 +1117,12 @@ if __name__ == '__main__': + gc.collect() + new_objects = len(gc.get_objects()) - orig_objects + if new_objects >= N: +- print "WARNING: Detected memory leak of %d objects." % new_objects ++ print("WARNING: Detected memory leak of %d objects." % new_objects) + if sys.version_info >= (2,2,2): +- print " Please notify jacobs@theopalgroup.com immediately." ++ print(" Please notify jacobs@theopalgroup.com immediately.") + else: +- print " You are running a Python older than 2.2.1 or older. Several" +- print " memory leaks in the core interepreter were fixed in version" +- print " 2.2.2, so we strongly recommend upgrading." ++ print(" You are running a Python older than 2.2.1 or older. Several") ++ print(" memory leaks in the core interepreter were fixed in version") ++ print(" 2.2.2, so we strongly recommend upgrading.") + +- print 'Tests passed' ++ print('Tests passed') +--- dal/dbapi/dbapi.py.orig 2008-10-16 05:52:44 UTC ++++ dal/dbapi/dbapi.py +@@ -83,9 +83,9 @@ print cs.fetchone()[0] + + __revision__ = 0.1 + +-import dbtime +-import dbexceptions +-import paramstyles ++from . import dbtime ++from . import dbexceptions ++from . import paramstyles + + class MWrapper(object): + """Wraps DBAPI2 driver.""" +@@ -149,10 +149,10 @@ class MWrapper(object): + assert dtmodname in ('py', 'mx', 'native') + if dtmodname == 'py': + if not dbtime.have_datetime: +- raise Exception, 'datetime module not available.' ++ raise Exception('datetime module not available.') + elif dtmodname == 'mx': + if not dbtime.have_mxDateTime: +- raise Exception, 'mx.DateTime module not available.' 
++ raise Exception('mx.DateTime module not available.') + self.__dtmod = dtmodname + + dtmod = property(__getDtMod, __setDtMod) +@@ -162,7 +162,7 @@ class MWrapper(object): + + def __setUseDbRow(self, use_db_row): + if use_db_row: +- import db_row ++ from . import db_row + globals()['db_row'] = db_row + self.__use_db_row = use_db_row + +@@ -265,7 +265,7 @@ class Cursor(object): + def __setDbRow(self, use_db_row): + """Set value of use_db_row for cursor.""" + if use_db_row: +- import db_row ++ from . import db_row + globals()['db_row'] = db_row + self.__use_db_row = use_db_row + +@@ -406,7 +406,7 @@ class Cursor(object): + elif self._mwrapper._convert_bool and typelist[i] == self._driver.BOOLEAN: + boolpos.append(i) + # loop through data to make changes +- for i in xrange(len(results)): ++ for i in range(len(results)): + set = results[i] + # make datetime objects + if len(datepos) > 0 or len(boolpos) > 0: +--- dal/dbapi/dbtime.py.orig 2008-10-03 11:24:17 UTC ++++ dal/dbapi/dbtime.py +@@ -98,7 +98,7 @@ def mx2pydt(mxdt): + else: + return mx2pydtdelta(mxdt) + else: +- raise Exception, 'Not a mx datetime type.' ++ raise Exception('Not a mx datetime type.') + + # Python datetime to mx.DateTime conversion functions + +@@ -148,7 +148,7 @@ def py2mxdt(pydt): + elif type(pydt) == datetime.timedelta: + return py2mxdtdelta(pydt) + else: +- raise Exception, 'Not a Python datetime type.' ++ raise Exception('Not a Python datetime type.') + + # Date and Time constructors + +@@ -160,7 +160,7 @@ def construct_date(dtpref, year, month, day): + return mx.DateTime.Date(year, month, day) + else: + # what exception should be raised here? +- raise Exception, 'Improper DATETIME set.' 
++ raise Exception('Improper DATETIME set.') + + def construct_time(dtpref, hour, minute, second): + """Creates time object for preferred type.""" +@@ -170,7 +170,7 @@ def construct_time(dtpref, hour, minute, second): + return mx.DateTime.Time(hour, minute, second) + else: + # what exception should be raised here? +- raise Exception, 'Improper DATETIME set.' ++ raise Exception('Improper DATETIME set.') + + def construct_timestamp(dtpref, year, month, day, hour, minute, second): + """Creates timestamp object for preferred type.""" +@@ -180,7 +180,7 @@ def construct_timestamp(dtpref, year, month, day, hour + return mx.DateTime.DateTime(year, month, day, hour, minute, second) + else: + # what exception should be raised here? +- raise Exception, 'Improper DATETIME set.' ++ raise Exception('Improper DATETIME set.') + + def construct_datefromticks(dtpref, ticks): + """Creates date object for preferred type and ticks.""" +@@ -190,7 +190,7 @@ def construct_datefromticks(dtpref, ticks): + return mx.DateTime.DateFromTicks(ticks) + else: + # what exception should be raised here? +- raise Exception, 'Improper DATETIME set.' ++ raise Exception('Improper DATETIME set.') + + def construct_timefromticks(dtpref, ticks): + """Creates time object for preferred type and ticks.""" +@@ -200,7 +200,7 @@ def construct_timefromticks(dtpref, ticks): + return mx.DateTime.TimeFromTicks(ticks) + else: + # what exception should be raised here? +- raise Exception, 'Improper DATETIME set.' ++ raise Exception('Improper DATETIME set.') + + def construct_timestampfromticks(dtpref, ticks): + """Creates timestamp object for preferred type and ticks.""" +@@ -210,7 +210,7 @@ def construct_timestampfromticks(dtpref, ticks): + return mx.DateTime.localtime(ticks) + else: + # what exception should be raised here? +- raise Exception, 'Improper DATETIME set.' 
++ raise Exception('Improper DATETIME set.') + + # Other functions + +@@ -250,25 +250,25 @@ def dtsubnative(dtpref, dbmod, params): + # not a datetime field + pass + else: +- raise ValueError, 'dbpref value not known.' ++ raise ValueError('dbpref value not known.') + return nparam + + def convert_dparams(dparams): + # Convert dictionary of parameters. +- for key, value in dparams.items(): ++ for key, value in list(dparams.items()): + dparams[key] = convertdt(value) + return dparams + + if type(params) == dict: + params = convert_dparams(params) + elif type(params) == list: +- for key in xrange(len(params)): ++ for key in range(len(params)): + if type(params[key]) == dict: + params[key] = convert_dparams(params[key]) + else: + params[key] = convertdt(params[key]) + else: +- raise ValueError, 'params should be list or dict.' ++ raise ValueError('params should be list or dict.') + return params + + def native2pref(nativedt, pref, dt_type=None, conv_func=None): +@@ -283,7 +283,7 @@ def native2pref(nativedt, pref, dt_type=None, conv_fun + elif dto_class == 'mx' and pref == 'py': + return mx2pydt(dto) + else: +- raise Exception, 'unknown dto_class/pref combination' ++ raise Exception('unknown dto_class/pref combination') + if isinstance(nativedt, datetime.datetime) and nativedt.tzinfo == None and server_tzinfo != None and local_tzinfo != None: + nativedt = datetime.datetime(nativedt.year, nativedt.month, nativedt.day, nativedt.hour, nativedt.minute, nativedt.second, nativedt.microsecond, server_tzinfo).astimezone(local_tzinfo) + # what type of object is this? +@@ -428,5 +428,5 @@ def main(): + assert pydtd.microseconds == mxdtd_msec + + if __name__ == '__main__': +- for i in xrange(1000): ++ for i in range(1000): + main() +--- dal/dbapi/dtuple.py.orig 2008-07-25 06:05:12 UTC ++++ dal/dbapi/dtuple.py +@@ -48,7 +48,7 @@ class TupleDescriptor: + """ + self.desc = tuple(desc) + ### validate the names? 
+- self.names = map(lambda x: x[0], desc) ++ self.names = [x[0] for x in desc] + self.namemap = { } + for i in range(len(self.names)): + self.namemap[self.names[i]] = i +@@ -145,7 +145,7 @@ class DatabaseTuple: + def __setattr__(self, name, value): + 'Simulate attribute-access via column names' + ### need to redirect into a db update +- raise TypeError, "can't assign to this subscripted object" ++ raise TypeError("can't assign to this subscripted object") + + def __getitem__(self, key): + 'Simulate indexed (tuple/list) and mapping-style access' +@@ -157,9 +157,9 @@ class DatabaseTuple: + 'Simulate indexed (tuple/list) and mapping-style access' + if type(key) == type(1): + ### need to redirect into a db update of elem #key +- raise TypeError, "can't assign to this subscripted object" ++ raise TypeError("can't assign to this subscripted object") + ### need to redirect into a db update of elem named key +- raise TypeError, "can't assign to this subscripted object" ++ raise TypeError("can't assign to this subscripted object") + + def __len__(self): + return len(self._data_) +@@ -171,7 +171,7 @@ class DatabaseTuple: + def __setslice__(self, i, j, list): + 'Simulate list/tuple slicing access' + ### need to redirect into a db update of elems +- raise TypeError, "can't assign to this subscripted object" ++ raise TypeError("can't assign to this subscripted object") + + def _keys_(self): + "Simulate mapping's methods" +@@ -183,7 +183,7 @@ class DatabaseTuple: + + def _items_(self): + "Simulate mapping's methods" +- return self.asMapping().items() ++ return list(self.asMapping().items()) + + def _count_(self, item): + "Simulate list's methods" +@@ -214,7 +214,7 @@ class DatabaseTuple: + def asMapping(self): + 'Return the "tuple" as a real mapping' + value = { } +- for name, idx in self._desc_.namemap.items(): ++ for name, idx in list(self._desc_.namemap.items()): + value[name] = self._data_[idx] + return value + +@@ -224,4 +224,4 @@ class DatabaseTuple: + + def asList(self): 
+ 'Return the "list" as a real mapping' +- return map(None, self._data_) ++ return list(self._data_) +--- dal/dbapi/paramstyles.py.orig 2008-07-25 06:05:12 UTC ++++ dal/dbapi/paramstyles.py +@@ -256,7 +256,7 @@ def segmentize( string ): + if current_segment != '': + segments.append(current_segment) + if quoted: +- raise SegmentizeError, 'Unmatched quotes in string' ++ raise SegmentizeError('Unmatched quotes in string') + + return segments + +@@ -334,7 +334,7 @@ def convert( from_paramstyle, to_paramstyle, query, pa + try: + convert_function = CONVERSION_MATRIX[from_paramstyle][to_paramstyle] + except KeyError: +- raise NotImplementedError, 'Unsupported paramstyle conversion: %s to %s' % (from_paramstyle, to_paramstyle) ++ raise NotImplementedError('Unsupported paramstyle conversion: %s to %s' % (from_paramstyle, to_paramstyle)) + + new_query, new_params = convert_function(query, params) + +@@ -362,25 +362,25 @@ if __name__ == '__main__': + } + indent = 4 + width = 16 +- print '' +- print '[ PARAMSTYLE TRANSLATIONS ]' +- print '' ++ print('') ++ print('[ PARAMSTYLE TRANSLATIONS ]') ++ print('') + for from_paramstyle in PARAMSTYLES['all']: + query = tests[from_paramstyle][0] + params = tests[from_paramstyle][1] +- print '' +- print '%s[ %s ]' % (' ' * indent, from_paramstyle.upper()) +- print '' ++ print('') ++ print('%s[ %s ]' % (' ' * indent, from_paramstyle.upper())) ++ print('') + label = 'query' +- print '%s%s%s: %s' % (' ' * indent, label, '.' * (width + indent - len(label)), query) ++ print('%s%s%s: %s' % (' ' * indent, label, '.' * (width + indent - len(label)), query)) + label = 'paramstyle' +- print '%s%s%s: %s' % (' ' * indent, label, '.' * (width + indent - len(label)), from_paramstyle) +- print '' ++ print('%s%s%s: %s' % (' ' * indent, label, '.' 
* (width + indent - len(label)), from_paramstyle)) ++ print('') + for to_paramstyle in PARAMSTYLES['all']: + converted_query, converted_params = convert(from_paramstyle, to_paramstyle, query, params) + label = '%s_query' % (to_paramstyle) +- print '%s%s%s: %s' % (' ' * indent * 2, label, '.' * (width - len(label)), converted_query) ++ print('%s%s%s: %s' % (' ' * indent * 2, label, '.' * (width - len(label)), converted_query)) + label = '%s_params' % (to_paramstyle) +- print '%s%s%s: %s' % (' ' * indent * 2, label, '.' * (width - len(label)), converted_params) +- print '' ++ print('%s%s%s: %s' % (' ' * indent * 2, label, '.' * (width - len(label)), converted_params)) ++ print('') + diff --git a/devel/py-dask/Makefile b/devel/py-dask/Makefile index f1531eed7c2..9f59eead8c7 100644 --- a/devel/py-dask/Makefile +++ b/devel/py-dask/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= dask -PORTVERSION= 2022.1.1 +PORTVERSION= 2022.2.1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -20,7 +20,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}cloudpickle>=1.1.1:devel/py-cloudpickle@${PY ${PYTHON_PKGNAMEPREFIX}toolz>=0.8.2:devel/py-toolz@${PY_FLAVOR} TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0:devel/py-pytest@${PY_FLAVOR} -USES= cpe python:3.7+ +USES= cpe python:3.8+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/devel/py-dask/distinfo b/devel/py-dask/distinfo index d6a7e150989..d85dc13b8f3 100644 --- a/devel/py-dask/distinfo +++ b/devel/py-dask/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971042 -SHA256 (dask-2022.1.1.tar.gz) = 3d5e935792d8a5a61d19cb7e63771ee02cdfd6122e36beb15c2dad6257320c58 -SIZE (dask-2022.1.1.tar.gz) = 4908032 +TIMESTAMP = 1647264520 +SHA256 (dask-2022.2.1.tar.gz) = b699da18d147da84c6c0be26d724dc1ec384960bf1f23c8db4f90740c9ac0a89 +SIZE (dask-2022.2.1.tar.gz) = 5018580 diff --git a/devel/py-datadog/Makefile b/devel/py-datadog/Makefile index fe3268f76b9..c226b395a32 100644 --- 
a/devel/py-datadog/Makefile +++ b/devel/py-datadog/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= datadog -PORTVERSION= 0.43.0 +PORTVERSION= 0.44.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -14,7 +14,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}requests>=2.6.0:www/py-requests@${PY_FLAVOR} -USES= python:3.6+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes @@ -22,4 +22,7 @@ NO_ARCH= yes # bin/dog CONFLICTS_INSTALL= dog +post-patch: + @${CP} ${FILESDIR}/setup.py ${WRKSRC}/ + .include diff --git a/devel/py-datadog/distinfo b/devel/py-datadog/distinfo index b2e7b903497..6bed4a8a0d6 100644 --- a/devel/py-datadog/distinfo +++ b/devel/py-datadog/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641045908 -SHA256 (datadog-0.43.0.tar.gz) = 1f2123083d9e1add6f238c62714b76ac2fc134d7d1c435cd82b976487b191b96 -SIZE (datadog-0.43.0.tar.gz) = 348833 +TIMESTAMP = 1647264522 +SHA256 (datadog-0.44.0.tar.gz) = 071170f0c7ef22511dbf7f9bd76c4be500ee2d3d52072900a5c87b5495d2c733 +SIZE (datadog-0.44.0.tar.gz) = 340022 diff --git a/devel/py-datadog/files/setup.py b/devel/py-datadog/files/setup.py new file mode 100644 index 00000000000..dafa20eac95 --- /dev/null +++ b/devel/py-datadog/files/setup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +from setuptools import setup + +setup( + name='datadog', + version='0.44.0', + description='The Datadog Python library', + long_description='# The Datadog Python library\n\n[![Unit Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.unit?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=10&branchName=master)\n[![Integration Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.integration?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=13&branchName=master)\n[![Documentation 
Status](https://readthedocs.org/projects/datadogpy/badge/?version=latest)](https://readthedocs.org/projects/datadogpy/?badge=latest)\n[![PyPI - Version](https://img.shields.io/pypi/v/datadog.svg)](https://pypi.org/project/datadog)\n[![PyPI - Downloads](https://pepy.tech/badge/datadog)](https://pepy.tech/project/datadog)\n\nThe Datadog Python Library is a collection of tools suitable for inclusion in existing Python projects or for the development of standalone scripts. It provides an abstraction on top of Datadog\'s raw HTTP interface and the Agent\'s DogStatsD metrics aggregation server, to interact with Datadog and efficiently report events and metrics.\n\n- Library Documentation: https://datadogpy.readthedocs.io/en/latest/\n- HTTP API Documentation: https://docs.datadoghq.com/api/\n- DatadogHQ: https://datadoghq.com\n\nSee [CHANGELOG.md](https://github.com/DataDog/datadogpy/blob/master/CHANGELOG.md) for changes.\n\n## Installation\n\nTo install from pip:\n\n pip install datadog\n\nTo install from source:\n\n python setup.py install\n\n## Datadog API\n\nTo support all Datadog HTTP APIs, a generated library is\navailable which will expose all the endpoints:\n[datadog-api-client-python](https://github.com/DataDog/datadog-api-client-python).\n\nFind below a working example for submitting an event to your Event Stream:\n\n```python\nfrom datadog import initialize, api\n\noptions = {\n "api_key": "",\n "app_key": "",\n}\n\ninitialize(**options)\n\ntitle = "Something big happened!"\ntext = "And let me tell you all about it here!"\ntags = ["version:1", "application:web"]\n\napi.Event.create(title=title, text=text, tags=tags)\n```\n\n**Consult the full list of supported Datadog API endpoints with working code examples in [the Datadog API documentation](https://docs.datadoghq.com/api/latest/?code-lang=python).**\n\n**Note**: The full list of available Datadog API endpoints is also available in the [Datadog Python Library 
documentation](https://datadogpy.readthedocs.io/en/latest/)\n\n#### Environment Variables\n\nAs an alternate method to using the `initialize` function with the `options` parameters, set the environment variables `DATADOG_API_KEY` and `DATADOG_APP_KEY` within the context of your application.\n\nIf `DATADOG_API_KEY` or `DATADOG_APP_KEY` are not set, the library attempts to fall back to Datadog\'s APM environment variable prefixes: `DD_API_KEY` and `DD_APP_KEY`.\n\n```python\nfrom datadog import initialize, api\n\n# Assuming you\'ve set `DD_API_KEY` and `DD_APP_KEY` in your env,\n# initialize() will pick it up automatically\ninitialize()\n\ntitle = "Something big happened!"\ntext = "And let me tell you all about it here!"\ntags = ["version:1", "application:web"]\n\napi.Event.create(title=title, text=text, tags=tags)\n```\n\nIn development, you can disable any `statsd` metric collection using `DD_DOGSTATSD_DISABLE=True` (or any not-empty value).\n\n## DogStatsD\n\nIn order to use DogStatsD metrics, the Agent must be [running and available](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python).\n\n### Instantiate the DogStatsD client with UDP\n\nOnce the Datadog Python Library is installed, instantiate the StatsD client using UDP in your code:\n\n```python\nfrom datadog import initialize, statsd\n\noptions = {\n "statsd_host": "127.0.0.1",\n "statsd_port": 8125,\n}\n\ninitialize(**options)\n```\n\nSee the full list of available [DogStatsD client instantiation parameters](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python#client-instantiation-parameters).\n\n#### Instantiate the DogStatsd client with UDS\n\nOnce the Datadog Python Library is installed, instantiate the StatsD client using UDS in your code:\n```python\n\nfrom datadog import initialize, statsd\n\noptions = {\n "statsd_socket_path": PATH_TO_SOCKET,\n}\n\ninitialize(**options)\n```\n\n#### Origin detection over UDP and UDS\n\nOrigin detection is a method to detect which pod 
`DogStatsD` packets are coming from in order to add the pod\'s tags to the tag list.\nThe `DogStatsD` client attaches an internal tag, `entity_id`. The value of this tag is the content of the `DD_ENTITY_ID` environment variable if found, which is the pod\'s UID. The Datadog Agent uses this tag to add container tags to the metrics. To avoid overwriting this global tag, make sure to only `append` to the `constant_tags` list.\n\nTo enable origin detection over UDP, add the following lines to your application manifest\n```yaml\nenv:\n - name: DD_ENTITY_ID\n valueFrom:\n fieldRef:\n fieldPath: metadata.uid\n```\n\n### Usage\n#### Metrics\n\nAfter the client is created, you can start sending custom metrics to Datadog. See the dedicated [Metric Submission: DogStatsD documentation](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python) to see how to submit all supported metric types to Datadog with working code examples:\n\n* [Submit a COUNT metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#count).\n* [Submit a GAUGE metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#gauge).\n* [Submit a SET metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#set)\n* [Submit a HISTOGRAM metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#histogram)\n* [Submit a TIMER metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#timer)\n* [Submit a DISTRIBUTION metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#distribution)\n\nSome options are supported when submitting metrics, like [applying a Sample Rate to your metrics](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-submission-options) or [tagging your metrics with your custom 
tags](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-tagging).\n\n#### Events\n\nAfter the client is created, you can start sending events to your Datadog Event Stream. See the dedicated [Event Submission: DogStatsD documentation](https://docs.datadoghq.com/events/guides/dogstatsd/?code-lang=python) to see how to submit an event to your Datadog Event Stream.\n\n#### Service Checks\n\nAfter the client is created, you can start sending Service Checks to Datadog. See the dedicated [Service Check Submission: DogStatsD documentation](https://docs.datadoghq.com/developers/service_checks/dogstatsd_service_checks_submission/?code-lang=python) to see how to submit a Service Check to Datadog.\n\n### Monitoring this client\n\nThis client automatically injects telemetry about itself in the DogStatsD stream.\nThose metrics will not be counted as custom and will not be billed. This feature can be disabled using the `statsd.disable_telemetry()` method.\n\nSee [Telemetry documentation](https://docs.datadoghq.com/developers/dogstatsd/high_throughput/?code-lang=python#client-side-telemetry) to learn more about it.\n\n### Benchmarks\n\n_Note: You will need to install `psutil` package before running the benchmarks._\n\nIf you would like to get an approximate idea on the throughput that your DogStatsD library\ncan handle on your system, you can run the included local benchmark code:\n\n```sh-session\n$ # Python 2 Example\n$ python2 -m unittest -vvv tests.performance.test_statsd_throughput\n\n$ # Python 3 Example\n$ python3 -m unittest -vvv tests.performance.test_statsd_throughput\n```\n\nYou can also add set `BENCHMARK_*` to customize the runs:\n```sh-session\n$ # Example #1\n$ BENCHMARK_NUM_RUNS=10 BENCHMARK_NUM_THREADS=1 BENCHMARK_NUM_DATAPOINTS=5000 BENCHMARK_TRANSPORT="UDP" python2 -m unittest -vvv tests.performance.test_statsd_throughput\n\n$ # Example #2\n$ BENCHMARK_NUM_THREADS=10 BENCHMARK_TRANSPORT="UDS" python3 -m unittest -vvv 
tests.performance.test_statsd_throughput\n```\n\n## Maximum packets size in high-throughput scenarios\n\nIn order to have the most efficient use of this library in high-throughput scenarios,\ndefault values for the maximum packets size have already been set for both UDS (8192 bytes)\nand UDP (1432 bytes) in order to have the best usage of the underlying network.\nHowever, if you perfectly know your network and you know that a different value for the maximum packets\nsize should be used, you can set it with the parameter `max_buffer_len`. Example:\n\n```python\nfrom datadog import initialize\n\noptions = {\n "api_key": "",\n "app_key": "",\n "max_buffer_len": 4096,\n}\n\ninitialize(**options)\n```\n\n## Thread Safety\n\n`DogStatsD` and `ThreadStats` are thread-safe.\n', + author_email='"Datadog, Inc." ', + classifiers=[ + 'License :: OSI Approved :: BSD License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: Implementation :: CPython', + 'Programming Language :: Python :: Implementation :: PyPy', + ], + install_requires=[ + 'configparser<5; python_version < "3.0"', + 'requests>=2.6.0', + 'typing; python_version < "3.5"', + ], + entry_points={ + 'console_scripts': [ + 'dog = datadog.dogshell:main', + 'dogshell = datadog.dogshell:main', + 'dogshellwrap = datadog.dogshell.wrap:main', + 'dogwrap = datadog.dogshell.wrap:main', + ], + }, + packages=[ + 'datadog', + 'datadog.api', + 'datadog.dogshell', + 'datadog.dogstatsd', + 'datadog.threadstats', + 'datadog.util', + 'tests', + 'tests.integration', + 'tests.integration.api', + 'tests.integration.dogshell', + 'tests.performance', + 'tests.unit', + 'tests.unit.api', + 'tests.unit.dogstatsd', + 'tests.unit.dogwrap', + 'tests.unit.threadstats', + 'tests.unit.util', + 'tests.util', + ], +) diff --git 
a/devel/py-ddtrace/Makefile b/devel/py-ddtrace/Makefile index 146950ff1ac..6799c4d9e3c 100644 --- a/devel/py-ddtrace/Makefile +++ b/devel/py-ddtrace/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= ddtrace -PORTVERSION= 0.58.5 +PORTVERSION= 0.59.1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-ddtrace/distinfo b/devel/py-ddtrace/distinfo index 15892f2b716..50bbe74b5f9 100644 --- a/devel/py-ddtrace/distinfo +++ b/devel/py-ddtrace/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057936 -SHA256 (ddtrace-0.58.5.tar.gz) = 3e1d36938de39b5283014b50b4e6af3233611419fdbf38f5496369ebd5a97257 -SIZE (ddtrace-0.58.5.tar.gz) = 1643673 +TIMESTAMP = 1647264524 +SHA256 (ddtrace-0.59.1.tar.gz) = 0bca77c488e101ca17ded6ef0dcc1eb493fc5e8d085efe3eeb73047138083844 +SIZE (ddtrace-0.59.1.tar.gz) = 1655581 diff --git a/devel/py-distributed/Makefile b/devel/py-distributed/Makefile index 7ceed3bd8f2..8d708e2712e 100644 --- a/devel/py-distributed/Makefile +++ b/devel/py-distributed/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= distributed -PORTVERSION= 2022.1.1 +PORTVERSION= 2022.2.1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,7 +13,7 @@ LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE.txt RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}click>=6.6:devel/py-click@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}cloudpickle>=1.3.0:devel/py-cloudpickle@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}cloudpickle>=1.5.0:devel/py-cloudpickle@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}dask>=${PORTVERSION}:devel/py-dask@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}Jinja2>=0:devel/py-Jinja2@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}msgpack>=0.6.0:devel/py-msgpack@${PY_FLAVOR} \ @@ -26,7 +26,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}click>=6.6:devel/py-click@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}tornado>=6.0.3:www/py-tornado@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}zict>=0.1.3:devel/py-zict@${PY_FLAVOR} -USES= 
python:3.7+ +USES= python:3.8+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/devel/py-distributed/distinfo b/devel/py-distributed/distinfo index 7bfd4292a76..b72054966d4 100644 --- a/devel/py-distributed/distinfo +++ b/devel/py-distributed/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971044 -SHA256 (distributed-2022.1.1.tar.gz) = c227b5dd2c784830917679487f049fb943c154f1993f187d5c52076aa78a72d0 -SIZE (distributed-2022.1.1.tar.gz) = 905723 +TIMESTAMP = 1647264526 +SHA256 (distributed-2022.2.1.tar.gz) = fb62a75af8ef33bbe1aa80a68c01a33a93c1cd5a332dd017ab44955bf7ecf65b +SIZE (distributed-2022.2.1.tar.gz) = 916223 diff --git a/devel/py-distributed/files/patch-requirements.txt b/devel/py-distributed/files/patch-requirements.txt deleted file mode 100644 index 1b462c9c744..00000000000 --- a/devel/py-distributed/files/patch-requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ ---- requirements.txt.orig 2022-01-28 20:26:29 UTC -+++ requirements.txt -@@ -1,5 +1,5 @@ - click >= 6.6 --cloudpickle >= 1.5.0 -+cloudpickle >= 1.3.0 - dask == 2022.01.1 - jinja2 - msgpack >= 0.6.0 diff --git a/devel/py-epc/Makefile b/devel/py-epc/Makefile new file mode 100644 index 00000000000..00e99fefe0f --- /dev/null +++ b/devel/py-epc/Makefile @@ -0,0 +1,17 @@ +PORTNAME= epc +DISTVERSION= 0.0.5 +CATEGORIES= devel python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} + +MAINTAINER= ashish@FreeBSD.org +COMMENT= RPC Stack for Emacs Lisp for Python + +LICENSE= GPLv3 + +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}sexpdata>=0.0.3:devel/py-sexpdata@${PY_FLAVOR} + +USES= python +USE_PYTHON= autoplist distutils + +.include diff --git a/devel/py-epc/distinfo b/devel/py-epc/distinfo new file mode 100644 index 00000000000..413bd5af808 --- /dev/null +++ b/devel/py-epc/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647095104 +SHA256 (epc-0.0.5.tar.gz) = a14d2ea74817955a20eb00812e3a4630a132897eb4d976420240f1152c0d7d25 +SIZE (epc-0.0.5.tar.gz) = 17968 diff --git a/devel/py-epc/pkg-descr 
b/devel/py-epc/pkg-descr new file mode 100644 index 00000000000..4b6ab46c7cf --- /dev/null +++ b/devel/py-epc/pkg-descr @@ -0,0 +1,6 @@ +EPC is an RPC stack for Emacs Lisp and Python-EPC is its +server side and client side implementation in Python. Using +Python-EPC, you can easily call Emacs Lisp functions from +Python and Python functions from Emacs + +WWW: https://pypi.python.org/pypi/epc diff --git a/devel/py-executing/Makefile b/devel/py-executing/Makefile index dd242da89d9..dede3ffb267 100644 --- a/devel/py-executing/Makefile +++ b/devel/py-executing/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= executing -PORTVERSION= 0.8.2 +PORTVERSION= 0.8.3 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-executing/distinfo b/devel/py-executing/distinfo index 9c3f62f170c..37e1c15c06a 100644 --- a/devel/py-executing/distinfo +++ b/devel/py-executing/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634111738 -SHA256 (executing-0.8.2.tar.gz) = c23bf42e9a7b9b212f185b1b2c3c91feb895963378887bb10e64a2e612ec0023 -SIZE (executing-0.8.2.tar.gz) = 481669 +TIMESTAMP = 1647264528 +SHA256 (executing-0.8.3.tar.gz) = c6554e21c6b060590a6d3be4b82fb78f8f0194d809de5ea7df1c093763311501 +SIZE (executing-0.8.3.tar.gz) = 482046 diff --git a/devel/py-extremes/files/patch-2to3 b/devel/py-extremes/files/patch-2to3 new file mode 100644 index 00000000000..20f6e6b77e1 --- /dev/null +++ b/devel/py-extremes/files/patch-2to3 @@ -0,0 +1,11 @@ +--- peak/util/extremes.py.orig 2007-11-21 06:50:24 UTC ++++ peak/util/extremes.py +@@ -32,7 +32,7 @@ class Extreme(object): # Courtesy of PEP 326 + return self.__cmp__(other)>=0 + + def __ne__(self,other): +- return self.__cmp__(other)<>0 ++ return self.__cmp__(other)!=0 + + Max = Extreme(1, "Max") + Min = Extreme(-1, "Min") diff --git a/devel/py-extremes/files/patch-setup.py b/devel/py-extremes/files/patch-setup.py index 3b52e147150..709c3bf3954 100644 --- a/devel/py-extremes/files/patch-setup.py 
+++ b/devel/py-extremes/files/patch-setup.py @@ -1,5 +1,5 @@ ---- setup.py.orig Wed Nov 21 14:25:46 2007 -+++ setup.py Tue Jan 22 11:28:34 2008 +--- setup.py.orig 2009-02-05 04:14:48 UTC ++++ setup.py @@ -1,8 +1,6 @@ #!/usr/bin/env python """Distutils setup file""" diff --git a/devel/py-flatbuffers/Makefile b/devel/py-flatbuffers/Makefile index 2a30b85d678..bdf206f70a2 100644 --- a/devel/py-flatbuffers/Makefile +++ b/devel/py-flatbuffers/Makefile @@ -1,5 +1,5 @@ PORTNAME= flatbuffers -PORTVERSION= 1.11 +PORTVERSION= 1.12 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -8,20 +8,18 @@ MAINTAINER= sunpoet@FreeBSD.org COMMENT= FlatBuffers serialization format for Python LICENSE= APACHE20 -#LICENSE_FILE= Not packaged in PyPI sdist -USES= cpe python:3.6+ -CPE_VENDOR= google -USE_PYTHON= autoplist distutils - -OPTIONS_DEFINE= NUMPY - -NUMPY_DESC= Support scalar vectors as numpy arrays - -NUMPY_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}numpy>0:math/py-numpy@${PY_FLAVOR} - -MAKE_ENV+= VERSION=${PORTVERSION} +USES= cpe python:3.7+ +USE_PYTHON= autoplist concurrent distutils +MAKE_ENV= VERSION=${PORTVERSION} NO_ARCH= yes +CPE_VENDOR= google + +OPTIONS_DEFINE= NUMPY +NUMPY_DESC= Support scalar vectors as numpy arrays + +NUMPY_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}numpy>=0,1:math/py-numpy@${PY_FLAVOR} + .include diff --git a/devel/py-flatbuffers/distinfo b/devel/py-flatbuffers/distinfo index 3597359f0c3..92dda806a40 100644 --- a/devel/py-flatbuffers/distinfo +++ b/devel/py-flatbuffers/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1559472952 -SHA256 (flatbuffers-1.11.tar.gz) = f24185db54193540e3d684dc98aa7c2d89882341641548ceb36fd2589fef6c4e -SIZE (flatbuffers-1.11.tar.gz) = 10711 +TIMESTAMP = 1647264530 +SHA256 (flatbuffers-1.12.tar.gz) = 63bb9a722d5e373701913e226135b28a6f6ac200d5cc7b4d919fa38d73b44610 +SIZE (flatbuffers-1.12.tar.gz) = 11286 diff --git a/devel/py-flatbuffers/pkg-descr b/devel/py-flatbuffers/pkg-descr index eca252df135..b134916d621 
100644 --- a/devel/py-flatbuffers/pkg-descr +++ b/devel/py-flatbuffers/pkg-descr @@ -1,3 +1,9 @@ +FlatBuffers is a cross platform serialization library architected for maximum +memory efficiency. It allows you to directly access serialized data without +parsing/unpacking it first, while still having great forwards/backwards +compatibility. + Python runtime library for use with the Flatbuffers serialization format. WWW: https://google.github.io/flatbuffers/ +WWW: https://github.com/google/flatbuffers diff --git a/devel/py-flexmock/Makefile b/devel/py-flexmock/Makefile index 50c5f820c9f..a1cb6da4778 100644 --- a/devel/py-flexmock/Makefile +++ b/devel/py-flexmock/Makefile @@ -1,7 +1,7 @@ # Created by: Cheng-Lung Sung PORTNAME= flexmock -PORTVERSION= 0.11.2 +PORTVERSION= 0.11.3 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-flexmock/distinfo b/devel/py-flexmock/distinfo index f6a6a755cae..9e996986109 100644 --- a/devel/py-flexmock/distinfo +++ b/devel/py-flexmock/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971050 -SHA256 (flexmock-0.11.2.tar.gz) = 44f702c0d0adde7085b4c7afe9adab50b01b724aceeb7e49b29f5632e6325ce8 -SIZE (flexmock-0.11.2.tar.gz) = 51330 +TIMESTAMP = 1647264532 +SHA256 (flexmock-0.11.3.tar.gz) = b1fe35f6a5f32547b5cd31a15c060d9ab863dc08aff018cd73dc78d1b651edd4 +SIZE (flexmock-0.11.3.tar.gz) = 52509 diff --git a/devel/py-frozenlist/Makefile b/devel/py-frozenlist/Makefile index 94dd47c7d1d..cbec9735118 100644 --- a/devel/py-frozenlist/Makefile +++ b/devel/py-frozenlist/Makefile @@ -1,5 +1,5 @@ PORTNAME= frozenlist -PORTVERSION= 1.2.0 +PORTVERSION= 1.3.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -10,8 +10,13 @@ COMMENT= List-like structure which implements collections.abc.MutableSequence LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -USES= gettext-runtime python:3.6+ -USE_PYTHON= autoplist distutils -INSTALL_TARGET= install-strip +USES= 
gettext-runtime python:3.7+ +USE_PYTHON= autoplist cython distutils + +pre-configure: + @cd ${WRKSRC} && ${RM} frozenlist/_frozenlist.c && cython-${PYTHON_VER} -3 -I frozenlist -o frozenlist/_frozenlist.c frozenlist/_frozenlist.pyx + +post-install: + ${FIND} ${STAGEDIR}${PYTHON_SITELIBDIR} -name '*.so' -exec ${STRIP_CMD} {} + .include diff --git a/devel/py-frozenlist/distinfo b/devel/py-frozenlist/distinfo index d9ef02db751..53ba6380031 100644 --- a/devel/py-frozenlist/distinfo +++ b/devel/py-frozenlist/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1639526623 -SHA256 (frozenlist-1.2.0.tar.gz) = 68201be60ac56aff972dc18085800b6ee07973c49103a8aba669dee3d71079de -SIZE (frozenlist-1.2.0.tar.gz) = 65634 +TIMESTAMP = 1647264986 +SHA256 (frozenlist-1.3.0.tar.gz) = ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b +SIZE (frozenlist-1.3.0.tar.gz) = 65476 diff --git a/devel/py-fsspec/Makefile b/devel/py-fsspec/Makefile index c0a26df1249..524b94da148 100644 --- a/devel/py-fsspec/Makefile +++ b/devel/py-fsspec/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= fsspec -PORTVERSION= 2022.1.0 +PORTVERSION= 2022.2.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,7 +12,7 @@ COMMENT= File-system specification LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE -USES= python:3.6+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/devel/py-fsspec/distinfo b/devel/py-fsspec/distinfo index 8b51305048d..2f51d441451 100644 --- a/devel/py-fsspec/distinfo +++ b/devel/py-fsspec/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971052 -SHA256 (fsspec-2022.1.0.tar.gz) = 0bdd519bbf4d8c9a1d893a50b5ebacc89acd0e1fe0045d2f7b0e0c1af5990edc -SIZE (fsspec-2022.1.0.tar.gz) = 135205 +TIMESTAMP = 1647264534 +SHA256 (fsspec-2022.2.0.tar.gz) = 20322c659538501f52f6caa73b08b2ff570b7e8ea30a86559721d090e473ad5c +SIZE (fsspec-2022.2.0.tar.gz) = 136750 diff --git a/devel/py-google-cloud-iam/Makefile 
b/devel/py-google-cloud-iam/Makefile index a10dfebf942..af3047d5d47 100644 --- a/devel/py-google-cloud-iam/Makefile +++ b/devel/py-google-cloud-iam/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-cloud-iam -PORTVERSION= 2.5.1 +PORTVERSION= 2.6.1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,8 +12,8 @@ COMMENT= IAM Service Account Credentials API client library LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.28.0<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}proto-plus>=0.4.0:devel/py-proto-plus@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.15.0:devel/py-proto-plus@${PY_FLAVOR} USES= python:3.6+ USE_PYTHON= autoplist concurrent distutils diff --git a/devel/py-google-cloud-iam/distinfo b/devel/py-google-cloud-iam/distinfo index aa990775469..6fabc4aac97 100644 --- a/devel/py-google-cloud-iam/distinfo +++ b/devel/py-google-cloud-iam/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1636117256 -SHA256 (google-cloud-iam-2.5.1.tar.gz) = b26294d02b14b40586eceb099a0e3a74265ae10a3f46fd49890cac55ad5f861f -SIZE (google-cloud-iam-2.5.1.tar.gz) = 33917 +TIMESTAMP = 1647264536 +SHA256 (google-cloud-iam-2.6.1.tar.gz) = 7355cf1393c10d7460a6228605f0211c1eb24cfcaf6f2dda6799c5e34faa1624 +SIZE (google-cloud-iam-2.6.1.tar.gz) = 44435 diff --git a/devel/py-grizzled/Makefile b/devel/py-grizzled/Makefile index 71468a42eae..49198372a40 100644 --- a/devel/py-grizzled/Makefile +++ b/devel/py-grizzled/Makefile @@ -2,7 +2,7 @@ PORTNAME= grizzled PORTVERSION= 0.9.4 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,4 +13,7 @@ COMMENT= The Grizzled Python Utility Library USES= python:3.6+ USE_PYTHON= distutils autoplist +post-patch: + @${RM} ${WRKSRC}/ez_setup.py + 
.include diff --git a/devel/py-grizzled/files/patch-2to3 b/devel/py-grizzled/files/patch-2to3 new file mode 100644 index 00000000000..1ab44ad5a11 --- /dev/null +++ b/devel/py-grizzled/files/patch-2to3 @@ -0,0 +1,1595 @@ +--- grizzled/collections/dict.py.orig 2010-05-10 02:09:09 UTC ++++ grizzled/collections/dict.py +@@ -64,7 +64,7 @@ class OrderedDict(dict): + def __str__(self): + s = '{' + sep = '' +- for k, v in self.iteritems(): ++ for k, v in self.items(): + s += sep + if type(k) == str: + s += "'%s'" % k +@@ -98,7 +98,7 @@ class OrderedDict(dict): + yield key + + def update(self, d): +- for key, value in d.iteritems(): ++ for key, value in d.items(): + self[key] = value + + def pop(self, key, default=None): +@@ -165,7 +165,7 @@ class LRUList(object): + self.clear() + + def __str__(self): +- return '[' + ', '.join([str(tup) for tup in self.items()]) + ']' ++ return '[' + ', '.join([str(tup) for tup in list(self.items())]) + ']' + + def __repr__(self): + return self.__class__.__name__ + ':' + str(self) +@@ -177,20 +177,20 @@ class LRUList(object): + entry = self.head + while entry: + yield entry.key +- entry = entry.next ++ entry = entry.__next__ + + def keys(self): + return [k for k in self] + + def items(self): + result = [] +- for key, value in self.iteritems(): ++ for key, value in self.items(): + result.append((key, value)) + return result + + def values(self): + result = [] +- for key, value in self.iteritems(): ++ for key, value in self.items(): + result.append(value) + return result + +@@ -198,7 +198,7 @@ class LRUList(object): + entry = self.head + while entry: + yield (entry.key, entry.value) +- entry = entry.next ++ entry = entry.__next__ + + def iterkeys(self): + self.__iter__() +@@ -207,12 +207,12 @@ class LRUList(object): + entry = self.head + while entry: + yield entry.value +- entry = entry.next ++ entry = entry.__next__ + + def clear(self): + while self.head: + cur = self.head +- next = self.head.next ++ next = self.head.__next__ + cur.next = 
cur.previous = cur.key = cur.value = None + self.head = next + +@@ -220,14 +220,14 @@ class LRUList(object): + self.size = 0 + + def remove(self, entry): +- if entry.next: ++ if entry.__next__: + entry.next.previous = entry.previous + + if entry.previous: +- entry.previous.next = entry.next ++ entry.previous.next = entry.__next__ + + if entry == self.head: +- self.head = entry.next ++ self.head = entry.__next__ + + if entry == self.tail: + self.tail = entry.previous +@@ -309,11 +309,11 @@ class LRUDict(dict): + max_capacity : int + The maximum size of the dictionary + """ +- if kw.has_key('max_capacity'): ++ if 'max_capacity' in kw: + self.__max_capacity = kw['max_capacity'] + del kw['max_capacity'] + else: +- self.__max_capacity = sys.maxint ++ self.__max_capacity = sys.maxsize + + dict.__init__(self) + self.__removal_listeners = {} +@@ -411,7 +411,7 @@ class LRUDict(dict): + """ + Clear all removal and ejection listeners from the list of listeners. + """ +- for key in self.__removal_listeners.keys(): ++ for key in list(self.__removal_listeners.keys()): + del self.__removal_listeners[key] + + def __setitem__(self, key, value): +@@ -431,7 +431,7 @@ class LRUDict(dict): + def __str__(self): + s = '{' + sep = '' +- for k, v in self.iteritems(): ++ for k, v in self.items(): + s += sep + if type(k) == str: + s += "'%s'" % k +@@ -462,25 +462,25 @@ class LRUDict(dict): + return value + + def keys(self): +- return self.__lru_queue.keys() ++ return list(self.__lru_queue.keys()) + + def items(self): +- return self.__lru_queue.items() ++ return list(self.__lru_queue.items()) + + def values(self): +- return self.__lru_queue.values() ++ return list(self.__lru_queue.values()) + + def iteritems(self): +- return self.__lru_queue.iteritems() ++ return iter(self.__lru_queue.items()) + + def iterkeys(self): +- return self.__lru_queue.iterkeys() ++ return iter(self.__lru_queue.keys()) + + def itervalues(self): +- return self.__lru_queue.itervalues() ++ return 
iter(self.__lru_queue.values()) + + def update(self, d): +- for key, value in d.iteritems(): ++ for key, value in d.items(): + self[key] = value + + def pop(self, key, default=None): +@@ -507,7 +507,7 @@ class LRUDict(dict): + :raise KeyError: empty dictionary + """ + if len(self) == 0: +- raise KeyError, 'Attempted popitem() on empty dictionary' ++ raise KeyError('Attempted popitem() on empty dictionary') + + lru_entry = self.__lru_queue.remove_tail() + dict.__delitem__(self, lru_entry.key) +@@ -553,7 +553,7 @@ class LRUDict(dict): + def __notify_listeners(self, ejecting, key_value_pairs): + if self.__removal_listeners: + for key, value in key_value_pairs: +- for func, func_data in self.__removal_listeners.items(): ++ for func, func_data in list(self.__removal_listeners.items()): + on_eject_only, args = func_data + if (not on_eject_only) or ejecting: + func(key, value, *args) +--- grizzled/collections/tuple.py.orig 2010-05-10 02:09:26 UTC ++++ grizzled/collections/tuple.py +@@ -76,7 +76,7 @@ def _local_namedtuple(typename, fieldnames, verbose=Fa + # generating informative error messages and preventing template injection + # attacks. 
+ +- if isinstance(fieldnames, basestring): ++ if isinstance(fieldnames, str): + # names separated by whitespace and/or commas + fieldnames = fieldnames.replace(',', ' ').split() + +@@ -138,13 +138,13 @@ def _local_namedtuple(typename, fieldnames, verbose=Fa + template += ' %s = property(itemgetter(%d))\n' % (name, i) + + if verbose: +- print template ++ print(template) + + # Execute the template string in a temporary namespace + namespace = dict(itemgetter=_itemgetter) + try: +- exec template in namespace +- except SyntaxError, e: ++ exec(template, namespace) ++ except SyntaxError as e: + raise SyntaxError(e.message + ':\n' + template) + + result = namespace[typename] +--- grizzled/config.py.orig 2010-05-10 02:06:31 UTC ++++ grizzled/config.py +@@ -169,15 +169,15 @@ That will preprocess the enhanced configuration file, + that is suitable for parsing by the standard Python ``config`` module. + ''' + +-from __future__ import absolute_import + ++ + __docformat__ = "restructuredtext en" + + # --------------------------------------------------------------------------- + # Imports + # --------------------------------------------------------------------------- + +-import ConfigParser ++import configparser + import logging + import string + import os +@@ -200,8 +200,8 @@ __all__ = ['Configuration', 'preprocess', + # --------------------------------------------------------------------------- + + log = logging.getLogger('grizzled.config') +-NoOptionError = ConfigParser.NoOptionError +-NoSectionError = ConfigParser.NoSectionError ++NoOptionError = configparser.NoOptionError ++NoSectionError = configparser.NoSectionError + + # --------------------------------------------------------------------------- + # Constants +@@ -250,7 +250,7 @@ class NoVariableError(ExceptionWithMessage): + """ + pass + +-class Configuration(ConfigParser.SafeConfigParser): ++class Configuration(configparser.SafeConfigParser): + """ + Configuration file parser. See the module documentation for details. 
+ """ +@@ -279,7 +279,7 @@ class Configuration(ConfigParser.SafeConfigParser): + substitute a non-existent variable. Otherwise, simple + substitute an empty value. + """ +- ConfigParser.SafeConfigParser.__init__(self, defaults) ++ configparser.SafeConfigParser.__init__(self, defaults) + self.__permit_includes = permit_includes + self.__use_ordered_sections = use_ordered_sections + self.__strict_substitution = strict_substitution +@@ -294,7 +294,7 @@ class Configuration(ConfigParser.SafeConfigParser): + :rtype: dict + :return: the instance-wide defaults, or ``None`` if there aren't any + """ +- return ConfigParser.SafeConfigParser.defaults(self) ++ return configparser.SafeConfigParser.defaults(self) + + @property + def sections(self): +@@ -305,7 +305,7 @@ class Configuration(ConfigParser.SafeConfigParser): + + Returns a list of sections. + """ +- return ConfigParser.SafeConfigParser.sections(self) ++ return configparser.SafeConfigParser.sections(self) + + def add_section(self, section): + """ +@@ -318,7 +318,7 @@ class Configuration(ConfigParser.SafeConfigParser): + + :raise DuplicateSectionError: section already exists + """ +- ConfigParser.SafeConfigParser.add_section(self, section) ++ configparser.SafeConfigParser.add_section(self, section) + + def has_section(self, section): + """ +@@ -333,7 +333,7 @@ class Configuration(ConfigParser.SafeConfigParser): + :return: ``True`` if the section exists in the configuration, ``False`` + if not. 
+ """ +- return ConfigParser.SafeConfigParser.has_section(self, section) ++ return configparser.SafeConfigParser.has_section(self, section) + + def options(self, section): + """ +@@ -348,7 +348,7 @@ class Configuration(ConfigParser.SafeConfigParser): + + :raise NoSectionError: no such section + """ +- return ConfigParser.SafeConfigParser.options(self, section) ++ return configparser.SafeConfigParser.options(self, section) + + def has_option(self, section, option): + """ +@@ -364,7 +364,7 @@ class Configuration(ConfigParser.SafeConfigParser): + :return: ``True`` if the section exists in the configuration and + has the specified option, ``False`` if not. + """ +- return ConfigParser.SafeConfigParser.has_option(self, section, option) ++ return configparser.SafeConfigParser.has_option(self, section, option) + + def read(self, filenames): + """ +@@ -398,7 +398,7 @@ class Configuration(ConfigParser.SafeConfigParser): + :rtype: list + :return: list of successfully parsed filenames or URLs + """ +- if isinstance(filenames, basestring): ++ if isinstance(filenames, str): + filenames = [filenames] + + newFilenames = [] +@@ -446,9 +446,9 @@ class Configuration(ConfigParser.SafeConfigParser): + :raise NoOptionError: no such option in the section + """ + def do_get(section, option): +- val = ConfigParser.SafeConfigParser.get(self, section, option) ++ val = configparser.SafeConfigParser.get(self, section, option) + if len(val.strip()) == 0: +- raise ConfigParser.NoOptionError(option, section) ++ raise configparser.NoOptionError(option, section) + return val + + if optional: +@@ -477,7 +477,7 @@ class Configuration(ConfigParser.SafeConfigParser): + :raise NoOptionError: no such option in the section + """ + def do_get(section, option): +- return ConfigParser.SafeConfigParser.getint(self, section, option) ++ return configparser.SafeConfigParser.getint(self, section, option) + + if optional: + return self.__get_optional(do_xget, section, option) +@@ -505,7 +505,7 @@ class 
Configuration(ConfigParser.SafeConfigParser): + :raise NoOptionError: no such option in the section + """ + def do_get(section, option): +- return ConfigParser.SafeConfigParser.getfloat(self, section, option) ++ return configparser.SafeConfigParser.getfloat(self, section, option) + + if optional: + return self.__get_optional(do_get, section, option) +@@ -538,7 +538,7 @@ class Configuration(ConfigParser.SafeConfigParser): + :raise ValueError: non-boolean value encountered + ''' + def do_get(section, option): +- return ConfigParser.SafeConfigParser.getboolean(self, ++ return configparser.SafeConfigParser.getboolean(self, + section, + option) + +@@ -572,7 +572,7 @@ class Configuration(ConfigParser.SafeConfigParser): + :raise NoOptionError: no such option in the section + ''' + def do_get(section, option): +- value = ConfigParser.SafeConfigParser.get(self, section, option) ++ value = configparser.SafeConfigParser.get(self, section, option) + return value.split(sep) + + if optional: +@@ -667,7 +667,7 @@ class Configuration(ConfigParser.SafeConfigParser): + + :raise NoSectionError: no such section + """ +- return ConfigParser.SafeConfigParser.items(self, section) ++ return configparser.SafeConfigParser.items(self, section) + + def set(self, section, option, value): + """ +@@ -684,7 +684,7 @@ class Configuration(ConfigParser.SafeConfigParser): + + :raise NoSectionError: no such section + """ +- ConfigParser.SafeConfigParser.set(self, section, option, value) ++ configparser.SafeConfigParser.set(self, section, option, value) + + def write(self, fileobj): + """ +@@ -698,7 +698,7 @@ class Configuration(ConfigParser.SafeConfigParser): + fileobj : file + file-like object to which to write the configuration + """ +- ConfigParser.SafeConfigParser.write(self, fileobj) ++ configparser.SafeConfigParser.write(self, fileobj) + + def remove_section(self, section): + """ +@@ -711,7 +711,7 @@ class Configuration(ConfigParser.SafeConfigParser): + + :raise NoSectionError: no such section + 
""" +- ConfigParser.SafeConfigParser.remove_section(self, section) ++ configparser.SafeConfigParser.remove_section(self, section) + + def optionxform(self, option_name): + """ +@@ -728,9 +728,9 @@ class Configuration(ConfigParser.SafeConfigParser): + def __get_optional(self, func, section, option): + try: + return func(section, option) +- except ConfigParser.NoOptionError: ++ except configparser.NoOptionError: + return None +- except ConfigParser.NoSectionError: ++ except configparser.NoSectionError: + return None + + def __preprocess(self, fp, name): +@@ -755,7 +755,7 @@ class Configuration(ConfigParser.SafeConfigParser): + + # Parse the resulting file into a local ConfigParser instance. + +- parsedConfig = ConfigParser.SafeConfigParser() ++ parsedConfig = configparser.SafeConfigParser() + + if self.__use_ordered_sections: + parsedConfig._sections = OrderedDict() +@@ -853,15 +853,15 @@ class _ConfigDict(dict): + except KeyError: + result = default + +- except ConfigParser.NoSectionError: ++ except configparser.NoSectionError: + result = default + +- except ConfigParser.NoOptionError: ++ except configparser.NoOptionError: + result = default + + if not result: + if self.__strict_substitution: +- raise NoVariableError, 'No such variable: "%s"' % key ++ raise NoVariableError('No such variable: "%s"' % key) + else: + result = '' + +@@ -888,7 +888,7 @@ class _ConfigDict(dict): + if section == 'env': + result = os.environ[option] + if len(result) == 0: +- raise KeyError, option ++ raise KeyError(option) + + elif section == 'program': + result = self.__value_from_program_section(option) +@@ -968,6 +968,6 @@ if __name__ == '__main__': + for var in sys.argv[2:]: + (section, option) = var.split(':') + val = config.get(section, option, optional=True) +- print '%s=%s' % (var, val) ++ print('%s=%s' % (var, val)) + else: + config.write(sys.stdout) +--- grizzled/db/__init__.py.orig 2009-10-24 15:46:15 UTC ++++ grizzled/db/__init__.py +@@ -149,8 +149,8 @@ def add_driver(key, 
driver_class, force=False): + try: + drivers[key] + if not force: +- raise ValueError, 'A DB driver named "%s" is already installed' %\ +- key ++ raise ValueError('A DB driver named "%s" is already installed' %\ ++ key) + except KeyError: + pass + +@@ -170,7 +170,7 @@ def get_drivers(): + :rtype: list + :return: list of ``DBDriver`` class names + """ +- return [str(d) for d in drivers.values()] ++ return [str(d) for d in list(drivers.values())] + + def get_driver_names(): + """ +@@ -178,7 +178,7 @@ def get_driver_names(): + Each of the returned names may be used as the first parameter to + the ``get_driver()`` function. + """ +- return drivers.keys() ++ return list(drivers.keys()) + + def get_driver(driver_name): + """ +@@ -197,9 +197,9 @@ def get_driver(driver_name): + try: + o = drivers[driver_name] + if type(o) == str: +- exec 'd = %s()' % o ++ exec('d = %s()' % o) + else: + d = o() + return d + except KeyError: +- raise ValueError, 'Unknown driver name: "%s"' % driver_name ++ raise ValueError('Unknown driver name: "%s"' % driver_name) +--- grizzled/db/base.py.orig 2009-10-24 15:45:34 UTC ++++ grizzled/db/base.py +@@ -118,9 +118,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + return self.__cursor.close() +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def execute(self, statement, parameters=None): +@@ -152,9 +152,9 @@ class Cursor(object): + self.__rowcount = -1 + self.__description = self.__cursor.description + return result +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + except: + raise Error(sys.exc_info()[1]) +@@ -181,9 +181,9 @@ class Cursor(object): + self.__rowcount = self.__cursor.rowcount + self.__description = self.__cursor.description + return result +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise 
Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + executeMany = executemany +@@ -202,9 +202,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + return self.__cursor.fetchone() +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def fetchall(self): +@@ -221,9 +221,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + return self.__cursor.fetchall() +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + fetchAll = fetchall +@@ -247,9 +247,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + self.__cursor.fetchmany(n) +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + fetchMany = fetchmany +@@ -277,9 +277,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + return self.__driver.get_rdbms_metadata(self.__cursor) +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def get_table_metadata(self, table): +@@ -321,9 +321,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + return self.__driver.get_table_metadata(table, self.__cursor) +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def get_index_metadata(self, table): +@@ -355,9 +355,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + return self.__driver.get_index_metadata(table, self.__cursor) +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def 
get_tables(self): +@@ -376,9 +376,9 @@ class Cursor(object): + dbi = self.__driver.get_import() + try: + return self.__driver.get_tables(self.__cursor) +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + class DB(object): +@@ -403,9 +403,9 @@ class DB(object): + dbi = driver.get_import() + for attr in ['BINARY', 'NUMBER', 'STRING', 'DATETIME', 'ROWID']: + try: +- exec 'self.%s = dbi.%s' % (attr, attr) ++ exec('self.%s = dbi.%s' % (attr, attr)) + except AttributeError: +- exec 'self.%s = 0' % attr ++ exec('self.%s = 0' % attr) + + def paramstyle(self): + """ +@@ -607,9 +607,9 @@ class DB(object): + dbi = self.__driver.get_import() + try: + return Cursor(self.__db.cursor(), self.__driver) +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def commit(self): +@@ -622,9 +622,9 @@ class DB(object): + dbi = self.__driver.get_import() + try: + self.__db.commit() +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def rollback(self): +@@ -637,9 +637,9 @@ class DB(object): + dbi = self.__driver.get_import() + try: + self.__db.rollback() +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + def close(self): +@@ -652,9 +652,9 @@ class DB(object): + dbi = self.__driver.get_import() + try: + self.__db.close() +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + class DBDriver(object): +@@ -734,9 +734,9 @@ class DBDriver(object): + password=password, + database=database) + return DB(self.__db, self) +- except dbi.Warning, val: ++ except dbi.Warning as val: + raise 
Warning(val) +- except dbi.Error, val: ++ except dbi.Error as val: + raise Error(val) + + @abstract +@@ -958,7 +958,7 @@ class DBDriver(object): + :raise Error: bad table name + """ + if not self._is_valid_table(cursor, table_name): +- raise Error, 'No such table: "%s"' % table_name ++ raise Error('No such table: "%s"' % table_name) + + def _is_valid_table(self, cursor, table_name): + """ +--- grizzled/db/dummydb.py.orig 2009-10-24 15:45:33 UTC ++++ grizzled/db/dummydb.py +@@ -37,13 +37,13 @@ class DummyCursor(object): + return None + + def fetchone(self): +- raise ValueError, "No results" ++ raise ValueError("No results") + + def fetchall(self): +- raise ValueError, "No results" ++ raise ValueError("No results") + + def fetchmany(self, n): +- raise ValueError, "No results" ++ raise ValueError("No results") + + class DummyDB(object): + +@@ -66,7 +66,7 @@ class DummyDriver(DBDriver): + """Dummy database driver, for testing.""" + + def get_import(self): +- import dummydb ++ from . import dummydb + return dummydb + + def get_display_name(self): +--- grizzled/decorators.py.orig 2010-05-10 02:06:50 UTC ++++ grizzled/decorators.py +@@ -177,5 +177,5 @@ if __name__ == '__main__': + try: + b.foo() + assert False +- except NotImplementedError, ex: +- print ex.message ++ except NotImplementedError as ex: ++ print(ex.message) +--- grizzled/file/__init__.py.orig 2010-05-10 02:04:49 UTC ++++ grizzled/file/__init__.py +@@ -2,8 +2,8 @@ + This module contains file- and path-related methods, classes, and modules. + """ + +-from __future__ import with_statement, absolute_import + ++ + __docformat__ = "restructuredtext en" + + # --------------------------------------------------------------------------- +@@ -79,7 +79,7 @@ def list_recursively(dir): + but is not a directory. + """ + if not _os.path.isdir(dir): +- raise ValueError, "%s is not a directory." % dir ++ raise ValueError("%s is not a directory." 
% dir) + + for f in _os.listdir(dir): + if _os.path.isdir(f): +@@ -135,7 +135,7 @@ def copy(files, target_dir, create_target=False): + _os.mkdir(target_dir) + + if _os.path.exists(target_dir) and (not _os.path.isdir(target_dir)): +- raise OSError, 'Cannot copy files to non-directory "%s"' % target_dir ++ raise OSError('Cannot copy files to non-directory "%s"' % target_dir) + + for f in files: + targetFile = _os.path.join(target_dir, _os.path.basename(f)) +@@ -167,7 +167,7 @@ def touch(files, times=None): + for f in files: + if _os.path.exists(f): + if not _os.path.isfile(f): +- raise OSError, "Can't touch non-file \"%s\"" % f ++ raise OSError("Can't touch non-file \"%s\"" % f) + _os.utime(f, times) + + else: +--- grizzled/file/includer.py.orig 2010-05-10 02:05:02 UTC ++++ grizzled/file/includer.py +@@ -89,8 +89,8 @@ import sys + import re + import tempfile + import atexit +-import urllib2 +-import urlparse ++import urllib.request, urllib.error, urllib.parse ++import urllib.parse + + import grizzled.exception + from grizzled.file import unlink_quietly +@@ -179,7 +179,7 @@ class Includer(object): + self.__name = name + + if output == None: +- from cStringIO import StringIO ++ from io import StringIO + output = StringIO() + + self.__maxnest = max_nest_level +@@ -198,7 +198,7 @@ class Includer(object): + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + """A file object is its own iterator. + + :rtype: string +@@ -302,15 +302,15 @@ class Includer(object): + + def truncate(self, size=None): + """Not supported, since ``Includer`` objects are read-only.""" +- raise IncludeError, 'Includers are read-only file objects.' ++ raise IncludeError('Includers are read-only file objects.') + + def write(self, s): + """Not supported, since ``Includer`` objects are read-only.""" +- raise IncludeError, 'Includers are read-only file objects.' 
++ raise IncludeError('Includers are read-only file objects.') + + def writelines(self, iterable): + """Not supported, since ``Includer`` objects are read-only.""" +- raise IncludeError, 'Includers are read-only file objects.' ++ raise IncludeError('Includers are read-only file objects.') + + def flush(self): + """No-op.""" +@@ -333,8 +333,8 @@ class Includer(object): + match = self.__include_pattern.search(line) + if match: + if self.__nested >= self.__maxnest: +- raise IncludeError, 'Exceeded maximum include recursion ' \ +- 'depth of %d' % self.__maxnest ++ raise IncludeError('Exceeded maximum include recursion ' \ ++ 'depth of %d' % self.__maxnest) + + inc_name = match.group(1) + logging.debug('Found include directive: %s' % line[:-1]) +@@ -351,12 +351,12 @@ class Includer(object): + is_url = False + openFunc = None + +- parsed_url = urlparse.urlparse(name_to_open) ++ parsed_url = urllib.parse.urlparse(name_to_open) + + # Account for Windows drive letters. + + if (parsed_url.scheme != '') and (len(parsed_url.scheme) > 1): +- openFunc = urllib2.urlopen ++ openFunc = urllib.request.urlopen + is_url = True + + else: +@@ -365,8 +365,8 @@ class Includer(object): + if enclosing_file_is_url: + # Use the parent URL as the base URL. 
+ +- name_to_open = urlparse.urljoin(enclosing_file, name_to_open) +- open_func = urllib2.urlopen ++ name_to_open = urllib.parse.urljoin(enclosing_file, name_to_open) ++ open_func = urllib.request.urlopen + is_url = True + + elif not os.path.isabs(name_to_open): +@@ -391,8 +391,8 @@ class Includer(object): + log.debug('Opening "%s"' % name_to_open) + f = open_func(name_to_open) + except: +- raise IncludeError, 'Unable to open "%s" as a file or a URL' %\ +- name_to_open ++ raise IncludeError('Unable to open "%s" as a file or a URL' %\ ++ name_to_open) + return (f, is_url, name_to_open) + + # --------------------------------------------------------------------------- +@@ -441,7 +441,7 @@ def preprocess(file_or_url, output=None, temp_suffix=' + + def _complain_if_closed(closed): + if closed: +- raise IncludeError, "I/O operation on closed file" ++ raise IncludeError("I/O operation on closed file") + + # --------------------------------------------------------------------------- + # Main program (for testing) +@@ -453,21 +453,21 @@ if __name__ == '__main__': + logging.basicConfig(level=logging.DEBUG, format=format) + + for file in sys.argv[1:]: +- import cStringIO as StringIO ++ import io as StringIO + out = StringIO.StringIO() + preprocess(file, output=out) + + header = 'File: %s, via preprocess()' + sep = '-' * len(header) +- print '\n%s\n%s\n%s\n' % (sep, header, sep) ++ print('\n%s\n%s\n%s\n' % (sep, header, sep)) + for line in out.readlines(): + sys.stdout.write(line) +- print sep ++ print(sep) + + inc = Includer(file) + header = 'File: %s, via Includer' + sep = '-' * len(header) +- print '\n%s\n%s\n%s\n' % (sep, header, sep) ++ print('\n%s\n%s\n%s\n' % (sep, header, sep)) + for line in inc: + sys.stdout.write(line) +- print '%s' % sep ++ print('%s' % sep) +--- grizzled/history.py.orig 2010-05-10 02:07:04 UTC ++++ grizzled/history.py +@@ -19,8 +19,8 @@ To get the appropriate History implementation for the + simply call the ``get_history()`` factory method. 
+ """ + +-from __future__ import with_statement + ++ + __docformat__ = "restructuredtext en" + + # --------------------------------------------------------------------------- +@@ -90,16 +90,16 @@ def get_history(verbose=True): + result = None + if _have_pyreadline: + if verbose: +- print 'Using pyreadline for history management.' ++ print('Using pyreadline for history management.') + result = PyReadlineHistory() + + elif _have_readline: + if verbose: +- print 'Using readline for history management.' ++ print('Using readline for history management.') + result = ReadlineHistory() + + else: +- print 'WARNING: Readline unavailable. There will be no history.' ++ print('WARNING: Readline unavailable. There will be no history.') + result = DummyHistory() + + result.max_length = DEFAULT_MAXLENGTH +@@ -132,7 +132,7 @@ class History(object): + Where to dump the history. + """ + for i in range(1, self.total + 1): +- print >> out, '%4d: %s' % (i, self.get_item(i)) ++ print('%4d: %s' % (i, self.get_item(i)), file=out) + + def get_last_matching_item(self, command_name): + """ +--- grizzled/io/__init__.py.orig 2009-10-24 15:48:25 UTC ++++ grizzled/io/__init__.py +@@ -4,8 +4,8 @@ + Input/Output utility methods and classes. + """ + +-from __future__ import absolute_import + ++ + __docformat__ = "restructuredtext en" + + # --------------------------------------------------------------------------- +@@ -201,7 +201,7 @@ class PushbackFile(object): + + :raise NotImplementedError: unconditionally + """ +- raise NotImplementedError, 'PushbackFile is read-only' ++ raise NotImplementedError('PushbackFile is read-only') + + def pushback(self, s): + """ +@@ -271,7 +271,7 @@ class PushbackFile(object): + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + """A file object is its own iterator. 
+ + :rtype: str +@@ -296,7 +296,7 @@ class PushbackFile(object): + + :raise NotImplementedError: unconditionally + """ +- raise NotImplementedError, 'PushbackFile is read-only' ++ raise NotImplementedError('PushbackFile is read-only') + + def truncate(self, size=-1): + """ +@@ -310,7 +310,7 @@ class PushbackFile(object): + + :raise NotImplementedError: unconditionally + """ +- raise NotImplementedError, 'PushbackFile is read-only' ++ raise NotImplementedError('PushbackFile is read-only') + + def tell(self): + """ +@@ -323,7 +323,7 @@ class PushbackFile(object): + + :raise NotImplementedError: unconditionally + """ +- raise NotImplementedError, 'PushbackFile is not seekable' ++ raise NotImplementedError('PushbackFile is not seekable') + + def seek(self, offset, whence=os.SEEK_SET): + """ +@@ -338,7 +338,7 @@ class PushbackFile(object): + + :raise NotImplementedError: unconditionally + """ +- raise NotImplementedError, 'PushbackFile is not seekable' ++ raise NotImplementedError('PushbackFile is not seekable') + + def fileno(self): + """ +--- grizzled/io/filelock.py.orig 2009-10-24 15:46:23 UTC ++++ grizzled/io/filelock.py +@@ -88,8 +88,7 @@ class FileLock(object): + self.lock = cls(fd) + + except KeyError: +- raise NotImplementedError, \ +- '''Don't know how to lock files on "%s" systems.''' % os.name ++ raise NotImplementedError('''Don't know how to lock files on "%s" systems.''' % os.name) + + def acquire(self, no_wait=False): + """ +--- grizzled/net/ftp/parse.py.orig 2009-10-24 15:52:59 UTC ++++ grizzled/net/ftp/parse.py +@@ -226,10 +226,10 @@ class FTPListDataParser(object): + elif c == 'r': + result.try_retr = True + elif c == 's': +- result.size = long(buf[i+1:j]) ++ result.size = int(buf[i+1:j]) + elif c == 'm': + result.mtime_type = MTIME_TYPE.LOCAL +- result.mtime = long(buf[i+1:j]) ++ result.mtime = int(buf[i+1:j]) + elif c == 'i': + result.id_type = ID_TYPE.FULL + result.id = buf[i+1:j-i-1] +@@ -290,7 +290,7 @@ class FTPListDataParser(object): + + elif 
state == 4: # getting tentative size + try: +- size = long(buf[i:j]) ++ size = int(buf[i:j]) + except ValueError: + pass + state = 5 +@@ -300,25 +300,25 @@ class FTPListDataParser(object): + if month >= 0: + state = 6 + else: +- size = long(buf[i:j]) ++ size = int(buf[i:j]) + + elif state == 6: # have size and month +- mday = long(buf[i:j]) ++ mday = int(buf[i:j]) + state = 7 + + elif state == 7: # have size, month, mday + if (j - i == 4) and (buf[i+1] == ':'): +- hour = long(buf[i]) +- minute = long(buf[i+2:i+4]) ++ hour = int(buf[i]) ++ minute = int(buf[i+2:i+4]) + result.mtime_type = MTIME_TYPE.REMOTE_MINUTE + result.mtime = self._guess_time(month, mday, hour, minute) + elif (j - i == 5) and (buf[i+2] == ':'): +- hour = long(buf[i:i+2]) +- minute = long(buf[i+3:i+5]) ++ hour = int(buf[i:i+2]) ++ minute = int(buf[i+3:i+5]) + result.mtime_type = MTIME_TYPE.REMOTE_MINUTE + result.mtime = self._guess_time(month, mday, hour, minute) + elif j - i >= 4: +- year = long(buf[i:j]) ++ year = int(buf[i:j]) + result.mtimetype = MTIME_TYPE.REMOTE_DAY + result.mtime = self._get_mtime(year, month, mday) + else: +@@ -383,7 +383,7 @@ class FTPListDataParser(object): + j = i + + j = buf.index('-', j) +- mday = long(buf[i:j]) ++ mday = int(buf[i:j]) + + j = _skip(buf, j, '-') + i = j +@@ -395,13 +395,13 @@ class FTPListDataParser(object): + j = _skip(buf, j, '-') + i = j + j = buf.index(' ', j) +- year = long(buf[i:j]) ++ year = int(buf[i:j]) + + j = _skip(buf, j, ' ') + i = j + + j = buf.index(':', j) +- hour = long(buf[i:j]) ++ hour = int(buf[i:j]) + j = _skip(buf, j, ':') + i = j + +@@ -410,7 +410,7 @@ class FTPListDataParser(object): + if j == buflen: + raise IndexError # abort, abort! 
+ +- minute = long(buf[i:j]) ++ minute = int(buf[i:j]) + + result.mtimetype = MTIME_TYPE.REMOTE_MINUTE + result.mtime = self._get_mtime(year, month, mday, hour, minute) +@@ -434,17 +434,17 @@ class FTPListDataParser(object): + result = FTPListData(buf) + + j = buf.index('-', j) +- month = long(buf[i:j]) ++ month = int(buf[i:j]) + + j = _skip(buf, j, '-') + i = j + j = buf.index('-', j) +- mday = long(buf[i:j]) ++ mday = int(buf[i:j]) + + j = _skip(buf, j, '-') + i = j + j = buf.index(' ', j) +- year = long(buf[i:j]) ++ year = int(buf[i:j]) + if year < 50: + year += 2000 + if year < 1000: +@@ -453,14 +453,14 @@ class FTPListDataParser(object): + j = _skip(buf, j, ' ') + i = j + j = buf.index(':', j) +- hour = long(buf[i:j]) ++ hour = int(buf[i:j]) + j = _skip(buf, j, ':') + i = j + while not (buf[j] in 'AP'): + j += 1 + if j == buflen: + raise IndexError +- minute = long(buf[i:j]) ++ minute = int(buf[i:j]) + + if buf[j] == 'A': + j += 1 +@@ -486,7 +486,7 @@ class FTPListDataParser(object): + i = j + j = buf.index(' ', j) + +- result.size = long(buf[i:j]) ++ result.size = int(buf[i:j]) + result.try_retr = True + + j = _skip(buf, j, ' ') +@@ -560,7 +560,7 @@ if __name__ == '__main__': + {'line': '-rw-r--r-- 1 root other 531 Jan 29 03:26 README', + 'type': 'Unix', + 'size': 531, +- 'time': (current_year, 1, 29, 03, 26, 0, 0, 0, -1), ++ 'time': (current_year, 1, 29, 0o3, 26, 0, 0, 0, -1), + 'name': 'README', + 'try_cwd': False}, + +@@ -632,7 +632,7 @@ if __name__ == '__main__': + 'type': 'MultiNet/VMS', + 'size': 0, + # Doesn't parse the seconds +- 'time': (1996, 1, 29, 03, 33, 0, 0, 0, -1), ++ 'time': (1996, 1, 29, 0o3, 33, 0, 0, 0, -1), + 'name': 'CII-MANUAL.TEX', + 'try_cwd': False}, + +@@ -655,7 +655,7 @@ if __name__ == '__main__': + {'line': '04-14-99 03:47PM 589 readme.htm', + 'type': 'MS-DOS', + 'size': 589, +- 'time': (1999, 04, 14, 15, 47, 0, 0, 0, -1), ++ 'time': (1999, 0o4, 14, 15, 47, 0, 0, 0, -1), + 'name': 'readme.htm', + 'try_cwd': False}, + ] +@@ -671,7 
+671,7 @@ if __name__ == '__main__': + for test in test_data: + line = test['line'] + prefix = 'Test %d (%s)' % (i, test['type']) +- print '%s: "%s"' % (prefix, test['name']) ++ print('%s: "%s"' % (prefix, test['name'])) + result = parser.parse_line(line) + assertEquals(result.raw_line, line, prefix) + assertEquals(result.size, test['size'], prefix) +--- grizzled/os.py.orig 2010-05-10 02:08:04 UTC ++++ grizzled/os.py +@@ -11,8 +11,8 @@ The ``grizzled.os`` module contains some operating sys + classes. It is a conceptual extension of the standard Python ``os`` module. + """ + +-from __future__ import absolute_import + ++ + __docformat__ = "restructuredtext en" + + # --------------------------------------------------------------------------- +@@ -270,8 +270,8 @@ def daemonize(no_close=False, pidfile=None): + def __fork(): + try: + return _os.fork() +- except OSError, e: +- raise DaemonError, ('Cannot fork', e.errno, e.strerror) ++ except OSError as e: ++ raise DaemonError('Cannot fork', e.errno, e.strerror) + + def __redirect_file_descriptors(): + import resource # POSIX resource information +@@ -306,8 +306,7 @@ def daemonize(no_close=False, pidfile=None): + + if _os.name != 'posix': + import errno +- raise DaemonError, \ +- ('daemonize() is only supported on Posix-compliant systems.', ++ raise DaemonError('daemonize() is only supported on Posix-compliant systems.', + errno.ENOSYS, _os.strerror(errno.ENOSYS)) + + try: +@@ -358,8 +357,8 @@ def daemonize(no_close=False, pidfile=None): + except DaemonError: + raise + +- except OSError, e: +- raise DaemonError, ('Unable to daemonize()', e.errno, e.strerror) ++ except OSError as e: ++ raise DaemonError('Unable to daemonize()', e.errno, e.strerror) + + # --------------------------------------------------------------------------- + # Main program (for testing) +--- grizzled/system.py.orig 2010-05-10 02:07:54 UTC ++++ grizzled/system.py +@@ -10,8 +10,8 @@ provide information about the Python system (the Pytho + etc.). 
It is a conceptual extension of the standard Python ``sys`` module. + """ + +-from __future__ import absolute_import + ++ + __docformat__ = "restructuredtext en" + + # --------------------------------------------------------------------------- +@@ -95,7 +95,7 @@ def python_version(version): + + tokens = version.split('.') + if len(tokens) > 3: +- raise ValueError, err ++ raise ValueError(err) + + major = int(tokens[0]) + minor = micro = serial = 0 +@@ -104,7 +104,7 @@ def python_version(version): + if len(tokens) > 1: + match = RELEASE_LEVEL_RE.match(tokens[1]) + if not match: +- raise ValueError, err ++ raise ValueError(err) + + minor = int(match.group(1)) + rl = match.group(2) +@@ -115,12 +115,12 @@ def python_version(version): + if len(tokens) > 2: + match = RELEASE_LEVEL_RE.match(tokens[2]) + if not match: +- raise ValueError, err ++ raise ValueError(err) + + micro = int(match.group(1)) + rl2 = match.group(2) + if rl and rl2: +- raise ValueError, err ++ raise ValueError(err) + if rl2: + release_level = rl2[0] + serial = int(rl2[1:]) +@@ -128,7 +128,7 @@ def python_version(version): + try: + release_level = RELEASE_LEVELS[release_level] + except KeyError: +- raise ValueError, err ++ raise ValueError(err) + + return (major << 24) |\ + (minor << 16) |\ +@@ -160,9 +160,8 @@ def split_python_version(version=None): + + release_level_string = RELEASE_LEVEL_NAMES.get(release_level, None) + if not release_level_string: +- raise ValueError, \ +- 'Bad release level 0x%x in version 0x%08x' %\ +- (release_level, version) ++ raise ValueError('Bad release level 0x%x in version 0x%08x' %\ ++ (release_level, version)) + + return (major, minor, micro, release_level_string, serial) + +@@ -208,15 +207,13 @@ def ensure_version(min_version): + elif type(min_version) == int: + pass + else: +- raise TypeError, \ +- 'version %s is not a string or an integer' % min_version ++ raise TypeError('version %s is not a string or an integer' % min_version) + + if _sys.hexversion < min_version: 
+- raise RuntimeError, \ +- 'This program requires Python version "%s" or better, but ' \ ++ raise RuntimeError('This program requires Python version "%s" or better, but ' \ + 'the current Python version is "%s".' %\ + (python_version_string(min_version), +- python_version_string(sys.hexversion)) ++ python_version_string(sys.hexversion))) + + + def class_for_name(class_name): +@@ -238,7 +235,7 @@ def class_for_name(class_name): + if len(tokens) > 1: + package = '.'.join(tokens[:-1]) + class_name = tokens[-1] +- exec 'from %s import %s' % (package, class_name) ++ exec('from %s import %s' % (package, class_name)) + + return eval(class_name) + +--- grizzled/text/__init__.py.orig 2009-10-24 15:46:33 UTC ++++ grizzled/text/__init__.py +@@ -10,7 +10,7 @@ __docformat__ = "restructuredtext en" + # Imports + # --------------------------------------------------------------------------- + +-from StringIO import StringIO ++from io import StringIO + + # --------------------------------------------------------------------------- + # Exports +@@ -117,10 +117,10 @@ def hexdump(source, out, width=16, start=0, limit=None + if length == 0: + if repeat_count and (not show_repeats): + if repeat_count > 1: +- print >> out, REPEAT_FORMAT % (repeat_count - 1) ++ print(REPEAT_FORMAT % (repeat_count - 1), file=out) + elif repeat_count == 1: +- print >> out, lastline +- print >> out, lastline ++ print(lastline, file=out) ++ print(lastline, file=out) + break + + else: +@@ -132,9 +132,9 @@ def hexdump(source, out, width=16, start=0, limit=None + else: + if repeat_count and (not show_repeats): + if repeat_count == 1: +- print >> out, lastline ++ print(lastline, file=out) + else: +- print >> out, REPEAT_FORMAT % (repeat_count - 1) ++ print(REPEAT_FORMAT % (repeat_count - 1), file=out) + repeat_count = 0 + + # Build output line. 
+@@ -149,7 +149,7 @@ def hexdump(source, out, width=16, start=0, limit=None + line = "%06x: %-*s %s" % (pos, hex_field_width, hex, asc) + + if show_buf: +- print >> out, line ++ print(line, file=out) + + pos = pos + length + lastbuf = buf +@@ -214,4 +214,4 @@ def str2bool(s): + 'off' : False, + 'on' : True}[s.lower()] + except KeyError: +- raise ValueError, 'Unrecognized boolean string: "%s"' % s ++ raise ValueError('Unrecognized boolean string: "%s"' % s) +--- test/collections/TestLRUDict.py.orig 2008-09-10 01:27:50 UTC ++++ test/collections/TestLRUDict.py +@@ -25,68 +25,68 @@ class TestLRUDict(object): + def test1(self): + lru = LRUDict(max_capacity=5) + +- print "Adding 'a' and 'b'" ++ print("Adding 'a' and 'b'") + lru['a'] = 'A' + lru['b'] = 'b' +- print lru +- print lru.keys() +- assert lru.keys() == ['b', 'a'] +- assert lru.values() == ['b', 'A'] ++ print(lru) ++ print(list(lru.keys())) ++ assert list(lru.keys()) == ['b', 'a'] ++ assert list(lru.values()) == ['b', 'A'] + +- print "Adding 'c'" ++ print("Adding 'c'") + lru['c'] = 'c' +- print lru +- print lru.keys() +- assert lru.keys() == ['c', 'b', 'a'] ++ print(lru) ++ print(list(lru.keys())) ++ assert list(lru.keys()) == ['c', 'b', 'a'] + +- print "Updating 'a'" ++ print("Updating 'a'") + lru['a'] = 'a' +- print lru +- print lru.keys() +- assert lru.keys() == ['a', 'c', 'b'] ++ print(lru) ++ print(list(lru.keys())) ++ assert list(lru.keys()) == ['a', 'c', 'b'] + +- print "Adding 'd' and 'e'" ++ print("Adding 'd' and 'e'") + lru['d'] = 'd' + lru['e'] = 'e' +- print lru +- print lru.keys() +- assert lru.keys() == ['e', 'd', 'a', 'c', 'b'] ++ print(lru) ++ print(list(lru.keys())) ++ assert list(lru.keys()) == ['e', 'd', 'a', 'c', 'b'] + +- print "Accessing 'b'" ++ print("Accessing 'b'") + assert lru['b'] == 'b' +- print lru +- print lru.keys() +- assert lru.keys() == ['b', 'e', 'd', 'a', 'c'] ++ print(lru) ++ print(list(lru.keys())) ++ assert list(lru.keys()) == ['b', 'e', 'd', 'a', 'c'] + +- print "Adding 
'f'" ++ print("Adding 'f'") + lru['f'] = 'f' + # Should knock 'c' out of the list +- print lru +- print lru.keys() +- assert lru.keys() == ['f', 'b', 'e', 'd', 'a'] ++ print(lru) ++ print(list(lru.keys())) ++ assert list(lru.keys()) == ['f', 'b', 'e', 'd', 'a'] + + def on_remove(key, value, the_list): +- print 'on_remove("%s")' % key ++ print('on_remove("%s")' % key) + the_list.append(key) + +- print 'Reducing capacity. Should result in eviction.' ++ print('Reducing capacity. Should result in eviction.') + ejected = [] + lru.add_ejection_listener(on_remove, ejected) + lru.max_capacity = 3 + ejected.sort() +- print 'ejected=%s' % ejected ++ print('ejected=%s' % ejected) + assert ejected == ['a', 'd'] +- print lru.keys() +- assert lru.keys() == ['f', 'b', 'e'] ++ print(list(lru.keys())) ++ assert list(lru.keys()) == ['f', 'b', 'e'] + +- print 'Testing popitem()' ++ print('Testing popitem()') + key, value = lru.popitem() +- print lru +- print lru.keys() ++ print(lru) ++ print(list(lru.keys())) + assert key == 'e' +- assert lru.keys() == ['f', 'b'] ++ assert list(lru.keys()) == ['f', 'b'] + +- print 'Clearing dictionary' ++ print('Clearing dictionary') + lru.clear_listeners() + lru.clear() + del lru +@@ -95,12 +95,12 @@ class TestLRUDict(object): + lru[key] = key + + def testBig(self): +- print 'Putting 10000 entries in a new LRU cache' ++ print('Putting 10000 entries in a new LRU cache') + lru = LRUDict(max_capacity=10000) + for i in range(0, lru.max_capacity): + lru[i] = i + + assert len(lru) == lru.max_capacity +- print 'Adding one more' ++ print('Adding one more') + assert len(lru) == lru.max_capacity +- print iter(lru).next() ++ print(next(iter(lru))) +--- test/file/Test.py.orig 2008-09-10 01:27:50 UTC ++++ test/file/Test.py +@@ -7,7 +7,7 @@ + # --------------------------------------------------------------------------- + + from grizzled.file import * +-from cStringIO import StringIO ++from io import StringIO + import os + import tempfile + import atexit +@@ -36,7 
+36,7 @@ class TestFilePackage(object): + + def testRecursivelyRemove(self): + path = tempfile.mkdtemp() +- print 'Created directory "%s"' % path ++ print('Created directory "%s"' % path) + + # Create some files underneath + +--- test/io/TestPushback.py.orig 2008-09-10 01:27:50 UTC ++++ test/io/TestPushback.py +@@ -7,7 +7,7 @@ + # --------------------------------------------------------------------------- + + from grizzled.io import * +-from cStringIO import StringIO ++from io import StringIO + import os + import tempfile + import atexit +@@ -31,24 +31,24 @@ ghi + pb = PushbackFile(f) + + s = pb.readline() +- print s ++ print(s) + assert s == 'abc\n' + pb.pushback(s) + s = pb.readline() +- print s ++ print(s) + assert s == 'abc\n' + s = pb.read(1) +- print s ++ print(s) + assert s == 'd' + s = pb.readline() +- print s ++ print(s) + assert s == 'ef\n' + s = pb.read(-1) +- print s ++ print(s) + assert s == 'ghi\n' + s = pb.readline() + assert s == '' + pb.pushback('foobar') + s = pb.readline() +- print s ++ print(s) + assert s == 'foobar' +\ No newline at end of file +--- test/text/TestStr2Bool.py.orig 2008-09-10 01:27:50 UTC ++++ test/text/TestStr2Bool.py +@@ -31,7 +31,7 @@ class TestStr2Bool(object): + ('1', True,)): + for s2 in (s, s.upper(), s.capitalize()): + val = str2bool(s2) +- print '"%s" -> %s. Expected=%s' % (s2, expected, val) ++ print('"%s" -> %s. 
Expected=%s' % (s2, expected, val)) + assert val == expected, \ + '"%s" does not produce expected %s' % (s2, expected) + diff --git a/devel/py-hash_ring/files/patch-2to3 b/devel/py-hash_ring/files/patch-2to3 new file mode 100644 index 00000000000..22bf1a6b274 --- /dev/null +++ b/devel/py-hash_ring/files/patch-2to3 @@ -0,0 +1,12 @@ +--- hash_ring/__init__.py.orig 2012-12-15 01:14:49 UTC ++++ hash_ring/__init__.py +@@ -1,6 +1,6 @@ +-from hash_ring import HashRing ++from .hash_ring import HashRing + + try: +- from memcache_ring import MemcacheRing +-except ImportError, e: ++ from .memcache_ring import MemcacheRing ++except ImportError as e: + pass diff --git a/devel/py-installer/Makefile b/devel/py-installer/Makefile index 80bcf17c59a..db6c06a8ee1 100644 --- a/devel/py-installer/Makefile +++ b/devel/py-installer/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= installer -PORTVERSION= 0.5.0 +PORTVERSION= 0.5.1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-installer/distinfo b/devel/py-installer/distinfo index 4c8901dee0f..53590e601ea 100644 --- a/devel/py-installer/distinfo +++ b/devel/py-installer/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057958 -SHA256 (installer-0.5.0.tar.gz) = 0cd6bdab3b358cf7e8749370b99aef9e12202751271c5ddb22126599b34dc665 -SIZE (installer-0.5.0.tar.gz) = 469711 +TIMESTAMP = 1647264540 +SHA256 (installer-0.5.1.tar.gz) = f970995ec2bb815e2fdaf7977b26b2091e1e386f0f42eafd5ac811953dc5d445 +SIZE (installer-0.5.1.tar.gz) = 469793 diff --git a/devel/py-installer/pkg-descr b/devel/py-installer/pkg-descr index 7baa82b559c..8f16020184a 100644 --- a/devel/py-installer/pkg-descr +++ b/devel/py-installer/pkg-descr @@ -7,4 +7,4 @@ wheels and installing packages from wheels. - Extensible simple implementations of the abstractions. - Platform-independent Python script wrapper generation. 
-WWW: https://github.com/pradyunsg/installer +WWW: https://github.com/pypa/installer diff --git a/devel/py-jupyter-core/Makefile b/devel/py-jupyter-core/Makefile index d29d9bdff1b..939d300ac58 100644 --- a/devel/py-jupyter-core/Makefile +++ b/devel/py-jupyter-core/Makefile @@ -1,5 +1,5 @@ PORTNAME= jupyter-core -PORTVERSION= 4.9.1 +PORTVERSION= 4.9.2 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-jupyter-core/distinfo b/devel/py-jupyter-core/distinfo index 726e01bebd8..997f9fbaccd 100644 --- a/devel/py-jupyter-core/distinfo +++ b/devel/py-jupyter-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1635788080 -SHA256 (jupyter_core-4.9.1.tar.gz) = dce8a7499da5a53ae3afd5a9f4b02e5df1d57250cf48f3ad79da23b4778cd6fa -SIZE (jupyter_core-4.9.1.tar.gz) = 74787 +TIMESTAMP = 1647264988 +SHA256 (jupyter_core-4.9.2.tar.gz) = d69baeb9ffb128b8cd2657fcf2703f89c769d1673c851812119e3a2a0e93ad9a +SIZE (jupyter_core-4.9.2.tar.gz) = 74912 diff --git a/devel/py-keystoneauth1/Makefile b/devel/py-keystoneauth1/Makefile index 04591363f2c..301463e7775 100644 --- a/devel/py-keystoneauth1/Makefile +++ b/devel/py-keystoneauth1/Makefile @@ -1,5 +1,5 @@ PORTNAME= keystoneauth1 -PORTVERSION= 4.4.0 +PORTVERSION= 4.5.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-keystoneauth1/distinfo b/devel/py-keystoneauth1/distinfo index 7448aa10eec..2272e2f7843 100644 --- a/devel/py-keystoneauth1/distinfo +++ b/devel/py-keystoneauth1/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632037090 -SHA256 (keystoneauth1-4.4.0.tar.gz) = 34662a6be67ab29424aabe6f99a8d7eb6b88d293109a07e60fea123ebffb314f -SIZE (keystoneauth1-4.4.0.tar.gz) = 266308 +TIMESTAMP = 1647264542 +SHA256 (keystoneauth1-4.5.0.tar.gz) = 49b3488966a43eeb0200ea511b997e6403c25d563a984c6330e82a0ebfc4540c +SIZE (keystoneauth1-4.5.0.tar.gz) = 266418 diff --git a/devel/py-libusb1/Makefile b/devel/py-libusb1/Makefile index 18f959980e0..0bc3df9c281 
100644 --- a/devel/py-libusb1/Makefile +++ b/devel/py-libusb1/Makefile @@ -1,5 +1,5 @@ PORTNAME= libusb1 -PORTVERSION= 2.0.1 +PORTVERSION= 3.0.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -11,12 +11,12 @@ LICENSE= LGPL21+ TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pyinstaller>=0:devel/py-pyinstaller@${PY_FLAVOR} -USES= python:3.6+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes do-test: - @${PYTHON_CMD} -m unittest discover -vs ${TEST_WRKSRC} + cd ${WRKSRC} && ${PYTHON_CMD} -m unittest -v .include diff --git a/devel/py-libusb1/distinfo b/devel/py-libusb1/distinfo index dd367209993..dc163b81f2a 100644 --- a/devel/py-libusb1/distinfo +++ b/devel/py-libusb1/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971066 -SHA256 (libusb1-2.0.1.tar.gz) = d3ba82ecf7ab6a48d21dac6697e26504670cc3522b8e5941bd28fb56cf3f6c46 -SIZE (libusb1-2.0.1.tar.gz) = 82619 +TIMESTAMP = 1647264544 +SHA256 (libusb1-3.0.0.tar.gz) = 5792a9defee40f15d330a40d9b1800545c32e47ba7fc66b6f28f133c9fcc8538 +SIZE (libusb1-3.0.0.tar.gz) = 82499 diff --git a/devel/py-libusb1/files/patch-usb1_____init____.py b/devel/py-libusb1/files/patch-usb1_____init____.py index bd827541152..42a8d9b7e74 100644 --- a/devel/py-libusb1/files/patch-usb1_____init____.py +++ b/devel/py-libusb1/files/patch-usb1_____init____.py @@ -1,8 +1,8 @@ https://lists.freebsd.org/pipermail/freebsd-usb/2015-January/013586.html ---- usb1/__init__.py.orig 2021-09-24 22:30:54 UTC +--- usb1/__init__.py.orig 2022-02-06 04:49:47 UTC +++ usb1/__init__.py -@@ -1885,8 +1885,10 @@ class USBDevice: +@@ -1884,8 +1884,10 @@ class USBDevice: Get the port number of each hub toward device. 
""" port_list = (c_uint8 * PATH_MAX_DEPTH)() diff --git a/devel/py-marshmallow/Makefile b/devel/py-marshmallow/Makefile index ca573e5b76c..7e2473ebdbd 100644 --- a/devel/py-marshmallow/Makefile +++ b/devel/py-marshmallow/Makefile @@ -1,7 +1,7 @@ # Created by: Mark Felder PORTNAME= marshmallow -PORTVERSION= 3.14.1 +PORTVERSION= 3.15.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,10 +12,9 @@ COMMENT= Simplified object serialization for Python LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}dateutil>=2.7.0:devel/py-dateutil@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}simplejson>=0:devel/py-simplejson@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}packaging>=0:devel/py-packaging@${PY_FLAVOR} -USES= cpe python:3.6+ +USES= cpe python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/devel/py-marshmallow/distinfo b/devel/py-marshmallow/distinfo index d6bfe387879..6264308ab2d 100644 --- a/devel/py-marshmallow/distinfo +++ b/devel/py-marshmallow/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637072966 -SHA256 (marshmallow-3.14.1.tar.gz) = 4c05c1684e0e97fe779c62b91878f173b937fe097b356cd82f793464f5bc6138 -SIZE (marshmallow-3.14.1.tar.gz) = 179183 +TIMESTAMP = 1647264546 +SHA256 (marshmallow-3.15.0.tar.gz) = 2aaaab4f01ef4f5a011a21319af9fce17ab13bf28a026d1252adab0e035648d5 +SIZE (marshmallow-3.15.0.tar.gz) = 179483 diff --git a/devel/py-minimongo/files/patch-2to3 b/devel/py-minimongo/files/patch-2to3 new file mode 100644 index 00000000000..c1d52c0b4e3 --- /dev/null +++ b/devel/py-minimongo/files/patch-2to3 @@ -0,0 +1,29 @@ +--- minimongo/config.py.orig 2011-04-13 04:54:06 UTC ++++ minimongo/config.py +@@ -31,7 +31,7 @@ def _resolve_name(name, package, level): + if not hasattr(package, 'rindex'): + raise ValueError("'package' not set to a string") + dot = len(package) +- for x in xrange(level, 1, -1): ++ for x in range(level, 1, -1): + try: + dot = package.rindex('.', 0, dot) + 
except ValueError: +@@ -66,7 +66,7 @@ if __name__ != '__main__': + + try: + settings_modules.append(os.environ['MINIMONGO_SETTINGS_MODULE']) +- except KeyError, e: ++ except KeyError as e: + pass + + # Here are the other 2 places that we try to import configs from: +@@ -81,7 +81,7 @@ if __name__ != '__main__': + # Once we get a successfull config module import, we break out + # of the loop above. + break +- except ImportError, exc: ++ except ImportError as exc: + # Error importing this modlue, so we continue + pass + diff --git a/devel/py-mongokit/files/patch-2to3 b/devel/py-mongokit/files/patch-2to3 new file mode 100644 index 00000000000..a5d463207e8 --- /dev/null +++ b/devel/py-mongokit/files/patch-2to3 @@ -0,0 +1,272 @@ +--- mongokit/document.py.orig 2014-02-16 10:19:42 UTC ++++ mongokit/document.py +@@ -82,9 +82,9 @@ class DocumentProperties(SchemaProperties): + if 'fields' not in index: + raise BadIndexError( + "'fields' key must be specify in indexes") +- for key, value in index.iteritems(): ++ for key, value in index.items(): + if key == "fields": +- if isinstance(value, basestring): ++ if isinstance(value, str): + if value not in attrs['_namespaces'] and value not in STRUCTURE_KEYWORDS: + raise ValueError( + "Error in indexes: can't find %s in structure" % value) +@@ -93,11 +93,11 @@ class DocumentProperties(SchemaProperties): + raise BadIndexError( + "Error in indexes: a tuple must contain " + "only two value : the field name and the direction") +- if not (isinstance(value[1], int) or isinstance(value[1], basestring)): ++ if not (isinstance(value[1], int) or isinstance(value[1], str)): + raise BadIndexError( + "Error in %s, the direction must be int or basestring " + "(got %s instead)" % (value[0], type(value[1]))) +- if not isinstance(value[0], basestring): ++ if not isinstance(value[0], str): + raise BadIndexError( + "Error in %s, the field name must be string " + "(got %s instead)" % (value[0], type(value[0]))) +@@ -135,10 +135,8 @@ class 
DocumentProperties(SchemaProperties): + assert isinstance(value, int) + + +-class Document(SchemaDocument): ++class Document(SchemaDocument, metaclass=DocumentProperties): + +- __metaclass__ = DocumentProperties +- + type_field = '_type' + + atomic_save = False # XXX Deprecated +@@ -166,7 +164,7 @@ class Document(SchemaDocument): + super(Document, self).__init__(doc=doc, gen_skel=gen_skel, gen_auth_types=False, + lang=lang, fallback_lang=fallback_lang) + if self.type_field in self: +- self[self.type_field] = unicode(self.__class__.__name__) ++ self[self.type_field] = str(self.__class__.__name__) + # collection + self.collection = collection + if collection: +@@ -236,11 +234,11 @@ class Document(SchemaDocument): + error = None + try: + super(Document, self).validate() +- except StructureError, e: ++ except StructureError as e: + error = e +- except KeyError, e: ++ except KeyError as e: + error = e +- except SchemaTypeError, e: ++ except SchemaTypeError as e: + error = e + if error: + if not self.migration_handler: +@@ -313,7 +311,7 @@ class Document(SchemaDocument): + raise MultipleResultsFound("%s results found" % count) + elif count == 1: + try: +- doc = bson_obj.next() ++ doc = next(bson_obj) + except StopIteration: + doc = None + return doc +@@ -326,7 +324,7 @@ class Document(SchemaDocument): + max = self.collection.count() + if max: + num = random.randint(0, max-1) +- return self.find().skip(num).next() ++ return next(self.find().skip(num)) + + def find_fulltext(self, search, **kwargs): + """ +@@ -376,7 +374,7 @@ class Document(SchemaDocument): + if count > 1: + raise MultipleResultsFound("%s results found" % count) + elif count == 1: +- return bson_obj.next() ++ return next(bson_obj) + + def reload(self): + """ +@@ -423,7 +421,7 @@ class Document(SchemaDocument): + self._make_reference(self, self.structure) + if '_id' not in self: + if uuid: +- self['_id'] = unicode("%s-%s" % (self.__class__.__name__, uuid4())) ++ self['_id'] = str("%s-%s" % 
(self.__class__.__name__, uuid4())) + self._process_custom_type('bson', self, self.structure) + self.collection.save(self, safe=safe, *args, **kwargs) + self._process_custom_type('python', self, self.structure) +@@ -453,12 +451,12 @@ class Document(SchemaDocument): + + if isinstance(given_fields, tuple): + fields = [given_fields] +- elif isinstance(given_fields, basestring): ++ elif isinstance(given_fields, str): + fields = [(given_fields, 1)] + else: + fields = [] + for field in given_fields: +- if isinstance(field, basestring): ++ if isinstance(field, str): + field = (field, 1) + fields.append(field) + log.debug('Creating index for %s' % str(given_fields)) +@@ -536,7 +534,7 @@ class Document(SchemaDocument): + raise ImportError("can't import anyjson. Please install it before continuing.") + obj = self.to_json_type() + _convert_to_python(obj, self.structure) +- return unicode(dumps(obj)) ++ return str(dumps(obj)) + + def from_json(self, json): + """ +@@ -700,7 +698,7 @@ class Document(SchemaDocument): + # it with None values + # + if len(struct[key]) and \ +- not [i for i in struct[key].keys() if type(i) is type]: ++ not [i for i in list(struct[key].keys()) if type(i) is type]: + if key in doc: + self._make_reference(doc[key], struct[key], new_path) + else: # case {unicode:int} +--- mongokit/migration.py.orig 2014-02-16 10:19:42 UTC ++++ mongokit/migration.py +@@ -47,7 +47,7 @@ class DocumentMigration(object): + + def validate_update(self, update_query): + structure = DotCollapsedDict(self.doc_class.structure) +- for op, fields in update_query.iteritems(): ++ for op, fields in update_query.items(): + for field in fields: + if op != '$unset' and op != '$rename': + if field not in structure: +@@ -84,7 +84,7 @@ class DocumentMigration(object): + collection.update(self.target, self.update, multi=True, safe=safe) + status = collection.database.last_status() + if not status.get('updatedExisting', 1): +- print "%s : %s >>> deprecated" % (self.__class__.__name__, 
method_name) ++ print("%s : %s >>> deprecated" % (self.__class__.__name__, method_name)) + + def get_deprecated(self, collection): + method_names = sorted([i for i in dir(self) if i.startswith('migration') or i.startswith('allmigration')]) +--- mongokit/schema_document.py.orig 2014-02-16 10:19:42 UTC ++++ mongokit/schema_document.py +@@ -32,8 +32,8 @@ from copy import deepcopy + + log = logging.getLogger(__name__) + +-from operators import SchemaOperator, IS +-from helpers import * ++from .operators import SchemaOperator, IS ++from .helpers import * + + __all__ = [ + 'CustomType', +@@ -230,7 +230,7 @@ class SchemaProperties(type): + raise ValueError("Error in i18n: can't find %s in structure" % i18n) + + +-class SchemaDocument(dict): ++class SchemaDocument(dict, metaclass=SchemaProperties): + """ + A SchemaDocument is dictionary with a building structured schema + The validate method will check that the document match the underling +@@ -301,7 +301,6 @@ class SchemaDocument(dict): + >>> doc + {"foo":{"bar":u"bla}} + """ +- __metaclass__ = SchemaProperties + + structure = None + required_fields = [] +@@ -324,10 +323,10 @@ class SchemaDocument(dict): + type(None), + bool, + int, +- long, ++ int, + float, +- unicode, +- basestring, ++ str, ++ str, + list, + dict, + datetime.datetime, +@@ -351,7 +350,7 @@ class SchemaDocument(dict): + self.validation_errors = {} + # init + if doc: +- for k, v in doc.iteritems(): ++ for k, v in doc.items(): + self[k] = v + gen_skel = False + if gen_skel: +@@ -421,7 +420,7 @@ class SchemaDocument(dict): + @classmethod + def __walk_dict(cls, dic): + # thanks jean_b for the patch +- for key, value in dic.items(): ++ for key, value in list(dic.items()): + if isinstance(value, dict) and len(value): + if type(key) is type: + yield '$%s' % key.__name__ +@@ -478,7 +477,7 @@ class SchemaDocument(dict): + raise StructureError("%s: %s is not an authorized type" % (name, struct)) + elif isinstance(struct, dict): + for key in struct: +- if 
isinstance(key, basestring): ++ if isinstance(key, str): + if "." in key: + raise BadKeyError("%s: %s must not contain '.'" % (name, key)) + if key.startswith('$'): +@@ -642,7 +641,7 @@ class SchemaDocument(dict): + def _process_validators(self, doc, struct, path=""): + doted_struct = DotCollapsedDict(self.structure) + doted_doc = DotCollapsedDict(doc) +- for key, validators in self.validators.iteritems(): ++ for key, validators in self.validators.items(): + if key in doted_doc and doted_doc[key] is not None: + if not hasattr(validators, "__iter__"): + validators = [validators] +@@ -650,9 +649,9 @@ class SchemaDocument(dict): + try: + if not validator(doted_doc[key]): + raise ValidationError("%s does not pass the validator " + validator.__name__) +- except Exception, e: ++ except Exception as e: + self._raise_exception(ValidationError, key, +- unicode(e) % key) ++ str(e) % key) + + def _process_custom_type(self, target, doc, struct, path="", root_path=""): + for key in struct: +@@ -751,7 +750,7 @@ class SchemaDocument(dict): + # if the dict is still empty into the document we build + # it with None values + # +- if len(struct[key]) and not [i for i in struct[key].keys() if type(i) is type]: ++ if len(struct[key]) and not [i for i in list(struct[key].keys()) if type(i) is type]: + self._set_default_fields(doc[key], struct[key], new_path) + else: + if new_path in self.default_values: +@@ -912,7 +911,7 @@ class i18n(dict, CustomType): + + def to_bson(self, value): + if value is not None: +- for l, v in value.iteritems(): ++ for l, v in value.items(): + if isinstance(v, list) and isinstance(self._field_type, list): + for i in v: + if not isinstance(i, self._field_type[0]): +@@ -922,7 +921,7 @@ class i18n(dict, CustomType): + if not isinstance(v, self._field_type): + raise SchemaTypeError("%s (%s) must be an instance of %s not %s" % ( + self._field_name, l, self._field_type, type(v).__name__)) +- return [{'lang': l, 'value': v} for l, v in value.iteritems()] ++ return 
[{'lang': l, 'value': v} for l, v in value.items()] + + def to_python(self, value): + if value is not None: diff --git a/devel/py-moto/Makefile b/devel/py-moto/Makefile index e86e0068f88..d7811ec4d7c 100644 --- a/devel/py-moto/Makefile +++ b/devel/py-moto/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= moto -PORTVERSION= 3.0.4 +PORTVERSION= 3.1.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-moto/distinfo b/devel/py-moto/distinfo index 5439a6a1d7f..ab9b627b2ac 100644 --- a/devel/py-moto/distinfo +++ b/devel/py-moto/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057970 -SHA256 (moto-3.0.4.tar.gz) = 168b8a3cb4dd8a6df8e51d582761cefa9657b9f45ac7e1eb24dae394ebc9e000 -SIZE (moto-3.0.4.tar.gz) = 2204895 +TIMESTAMP = 1647264548 +SHA256 (moto-3.1.0.tar.gz) = c4327b34228cc3d446e8e5434af5f997ab4c5f85cdf93a201833dca1ca0c0717 +SIZE (moto-3.1.0.tar.gz) = 2231450 diff --git a/devel/py-nbclassic/Makefile b/devel/py-nbclassic/Makefile index 8aa013fe515..195b82092ef 100644 --- a/devel/py-nbclassic/Makefile +++ b/devel/py-nbclassic/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= nbclassic -PORTVERSION= 0.3.5 +PORTVERSION= 0.3.6 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,7 +13,8 @@ LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}jupyter-server>=1.8<2:devel/py-jupyter-server@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}notebook>=0<7:www/py-notebook@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}notebook>=0<7:www/py-notebook@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}notebook-shim>=0.1.0:devel/py-notebook-shim@${PY_FLAVOR} USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils diff --git a/devel/py-nbclassic/distinfo b/devel/py-nbclassic/distinfo index 39e6d42763e..5639bcf728f 100644 --- a/devel/py-nbclassic/distinfo +++ b/devel/py-nbclassic/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642102285 -SHA256 
(nbclassic-0.3.5.tar.gz) = 99444dd63103af23c788d9b5172992f12caf8c3098dd5a35c787f0df31490c29 -SIZE (nbclassic-0.3.5.tar.gz) = 23584 +TIMESTAMP = 1647264550 +SHA256 (nbclassic-0.3.6.tar.gz) = 7dbac0a6cb71bbe3afa1fe89369e6e3174d89f42aa08216a327046de45aa9a4f +SIZE (nbclassic-0.3.6.tar.gz) = 15008 diff --git a/devel/py-nbclient/Makefile b/devel/py-nbclient/Makefile index 629dcae9820..9a1407053c8 100644 --- a/devel/py-nbclient/Makefile +++ b/devel/py-nbclient/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= nbclient -PORTVERSION= 0.5.11 +PORTVERSION= 0.5.13 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -15,7 +15,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}jupyter-client>=6.1.5:devel/py-jupyter-client@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}nbformat>=5.0:devel/py-nbformat@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}nest-asyncio>=0:devel/py-nest-asyncio@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}traitlets>=4.2:devel/py-traitlets@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}traitlets>=5.0.0:devel/py-traitlets@${PY_FLAVOR} USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils diff --git a/devel/py-nbclient/distinfo b/devel/py-nbclient/distinfo index f656a5514a9..0d8e5562000 100644 --- a/devel/py-nbclient/distinfo +++ b/devel/py-nbclient/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057972 -SHA256 (nbclient-0.5.11.tar.gz) = 751516992f34b58172bad54eef1e4bf7e4f4460d58e255ca1a4e5c9649476007 -SIZE (nbclient-0.5.11.tar.gz) = 75498 +TIMESTAMP = 1647264552 +SHA256 (nbclient-0.5.13.tar.gz) = 40c52c9b5e3c31faecaee69f202b3f53e38d7c1c563de0fadde9d7eda0fdafe8 +SIZE (nbclient-0.5.13.tar.gz) = 75191 diff --git a/devel/py-nbconvert/Makefile b/devel/py-nbconvert/Makefile index ac10779045b..d0684c1d5b6 100644 --- a/devel/py-nbconvert/Makefile +++ b/devel/py-nbconvert/Makefile @@ -1,5 +1,5 @@ PORTNAME= nbconvert -PORTVERSION= 6.4.1 +PORTVERSION= 6.4.4 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= 
${PYTHON_PKGNAMEPREFIX} @@ -10,7 +10,8 @@ COMMENT= Converting Jupyter Notebooks LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}bleach>=0:www/py-bleach@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}beautifulsoup>=0:www/py-beautifulsoup@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}bleach>=0:www/py-bleach@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}defusedxml>=0:devel/py-defusedxml@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}entrypoints>=0.2.2:devel/py-entrypoints@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}Jinja2>=2.4:devel/py-Jinja2@${PY_FLAVOR} \ diff --git a/devel/py-nbconvert/distinfo b/devel/py-nbconvert/distinfo index 32154363c1c..780c06bad0c 100644 --- a/devel/py-nbconvert/distinfo +++ b/devel/py-nbconvert/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643401024 -SHA256 (nbconvert-6.4.1.tar.gz) = 7dce3f977c2f9651841a3c49b5b7314c742f24dd118b99e51b8eec13c504f555 -SIZE (nbconvert-6.4.1.tar.gz) = 900632 +TIMESTAMP = 1647264990 +SHA256 (nbconvert-6.4.4.tar.gz) = ee0dfe34bbd1082ac9bfc750aae3c73fcbc34a70c5574c6986ff83c10a3541fd +SIZE (nbconvert-6.4.4.tar.gz) = 906988 diff --git a/devel/py-nbformat/Makefile b/devel/py-nbformat/Makefile index d9c5b0421ea..cef3370d48a 100644 --- a/devel/py-nbformat/Makefile +++ b/devel/py-nbformat/Makefile @@ -1,5 +1,5 @@ PORTNAME= nbformat -PORTVERSION= 5.1.3 +PORTVERSION= 5.2.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -10,8 +10,7 @@ COMMENT= Jupyter Notebook format LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/COPYING.md -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}ipython_genutils>=0:devel/py-ipython_genutils@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}jsonschema>=2.4:devel/py-jsonschema@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}jsonschema>=2.4:devel/py-jsonschema@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}jupyter-core>=0:devel/py-jupyter-core@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}sqlite3>=0:databases/py-sqlite3@${PY_FLAVOR} \ 
${PYTHON_PKGNAMEPREFIX}traitlets>=4.1:devel/py-traitlets@${PY_FLAVOR} @@ -19,7 +18,7 @@ TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0:devel/py-pytest@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pytest-cov>=0:devel/py-pytest-cov@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}testpath>=0:devel/py-testpath@${PY_FLAVOR} -USES= python:3.5+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/devel/py-nbformat/distinfo b/devel/py-nbformat/distinfo index 84460632842..48e9138669e 100644 --- a/devel/py-nbformat/distinfo +++ b/devel/py-nbformat/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1624630218 -SHA256 (nbformat-5.1.3.tar.gz) = b516788ad70771c6250977c1374fcca6edebe6126fd2adb5a69aa5c2356fd1c8 -SIZE (nbformat-5.1.3.tar.gz) = 73703 +TIMESTAMP = 1647264992 +SHA256 (nbformat-5.2.0.tar.gz) = 93df0b9c67221d38fb970c48f6d361819a6c388299a0ef3171bbb912edfe1324 +SIZE (nbformat-5.2.0.tar.gz) = 137745 diff --git a/devel/py-notebook-shim/Makefile b/devel/py-notebook-shim/Makefile new file mode 100644 index 00000000000..83335b4b2df --- /dev/null +++ b/devel/py-notebook-shim/Makefile @@ -0,0 +1,23 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= notebook-shim +PORTVERSION= 0.1.0 +CATEGORIES= devel python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +DISTNAME= notebook_shim-${PORTVERSION} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Shim layer for notebook traits and config + +LICENSE= BSD3CLAUSE +LICENSE_FILE= ${WRKSRC}/LICENSE + +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}jupyter-server>=1.8<2:devel/py-jupyter-server@${PY_FLAVOR} + +USES= python:3.7+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +.include diff --git a/devel/py-notebook-shim/distinfo b/devel/py-notebook-shim/distinfo new file mode 100644 index 00000000000..e2dc59d9811 --- /dev/null +++ b/devel/py-notebook-shim/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264402 +SHA256 (notebook_shim-0.1.0.tar.gz) = 7897e47a36d92248925a2143e3596f19c60597708f7bef50d81fcd31d7263e85 +SIZE 
(notebook_shim-0.1.0.tar.gz) = 12275 diff --git a/devel/py-notebook-shim/pkg-descr b/devel/py-notebook-shim/pkg-descr new file mode 100644 index 00000000000..614cb0bc70b --- /dev/null +++ b/devel/py-notebook-shim/pkg-descr @@ -0,0 +1,4 @@ +Notebook Shim provides a way for JupyterLab and other frontends to switch to +Jupyter Server for their Python Web application backend. + +WWW: https://github.com/jupyterlab/notebook_shim diff --git a/devel/py-omnijson/files/patch-2to3 b/devel/py-omnijson/files/patch-2to3 new file mode 100644 index 00000000000..72fcd402ce7 --- /dev/null +++ b/devel/py-omnijson/files/patch-2to3 @@ -0,0 +1,209 @@ +--- omnijson/packages/simplejson/decoder.py.orig 2011-06-19 16:43:47 UTC ++++ omnijson/packages/simplejson/decoder.py +@@ -87,8 +87,8 @@ _CONSTANTS = { + + STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS) + BACKSLASH = { +- '"': u'"', '\\': u'\\', '/': u'/', +- 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t', ++ '"': '"', '\\': '\\', '/': '/', ++ 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', + } + + DEFAULT_ENCODING = "utf-8" +@@ -117,8 +117,8 @@ def py_scanstring(s, end, encoding=None, strict=True, + content, terminator = chunk.groups() + # Content is contains zero or more unescaped string characters + if content: +- if not isinstance(content, unicode): +- content = unicode(content, encoding) ++ if not isinstance(content, str): ++ content = str(content, encoding) + _append(content) + # Terminator is the end of string, a literal control character, + # or a backslash denoting that an escape sequence follows +@@ -164,11 +164,11 @@ def py_scanstring(s, end, encoding=None, strict=True, + uni2 = int(esc2, 16) + uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00)) + next_end += 6 +- char = unichr(uni) ++ char = chr(uni) + end = next_end + # Append the unescaped character + _append(char) +- return u''.join(chunks), end ++ return ''.join(chunks), end + + + # Use speedup if available +@@ -177,10 +177,11 @@ 
scanstring = c_scanstring or py_scanstring + WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS) + WHITESPACE_STR = ' \t\n\r' + +-def JSONObject((s, end), encoding, strict, scan_once, object_hook, ++def JSONObject(xxx_todo_changeme, encoding, strict, scan_once, object_hook, + object_pairs_hook, memo=None, + _w=WHITESPACE.match, _ws=WHITESPACE_STR): + # Backwards compatibility ++ (s, end) = xxx_todo_changeme + if memo is None: + memo = {} + memo_get = memo.setdefault +@@ -269,7 +270,8 @@ def JSONObject((s, end), encoding, strict, scan_once, + pairs = object_hook(pairs) + return pairs, end + +-def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): ++def JSONArray(xxx_todo_changeme1, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): ++ (s, end) = xxx_todo_changeme1 + values = [] + nextchar = s[end:end + 1] + if nextchar in _ws: +--- omnijson/packages/simplejson/encoder.py.orig 2011-06-19 16:43:47 UTC ++++ omnijson/packages/simplejson/encoder.py +@@ -39,7 +39,7 @@ def encode_basestring(s): + s = s.decode('utf-8') + def replace(match): + return ESCAPE_DCT[match.group(0)] +- return u'"' + ESCAPE.sub(replace, s) + u'"' ++ return '"' + ESCAPE.sub(replace, s) + '"' + + + def py_encode_basestring_ascii(s): +@@ -160,7 +160,7 @@ class JSONEncoder(object): + self.allow_nan = allow_nan + self.sort_keys = sort_keys + self.use_decimal = use_decimal +- if isinstance(indent, (int, long)): ++ if isinstance(indent, int): + indent = ' ' * indent + self.indent = indent + if separators is not None: +@@ -200,7 +200,7 @@ class JSONEncoder(object): + + """ + # This is for extremely simple cases and benchmarks. 
+- if isinstance(o, basestring): ++ if isinstance(o, str): + if isinstance(o, str): + _encoding = self.encoding + if (_encoding is not None +@@ -219,7 +219,7 @@ class JSONEncoder(object): + if self.ensure_ascii: + return ''.join(chunks) + else: +- return u''.join(chunks) ++ return ''.join(chunks) + + def iterencode(self, o, _one_shot=False): + """Encode the given object and yield each string +@@ -302,7 +302,7 @@ class JSONEncoderForHTML(JSONEncoder): + if self.ensure_ascii: + return ''.join(chunks) + else: +- return u''.join(chunks) ++ return ''.join(chunks) + + def iterencode(self, o, _one_shot=False): + chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot) +@@ -317,10 +317,8 @@ def _make_iterencode(markers, _default, _encoder, _ind + _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, + _use_decimal, + ## HACK: hand-optimized bytecode; turn globals into locals +- False=False, +- True=True, + ValueError=ValueError, +- basestring=basestring, ++ str=str, + Decimal=Decimal, + dict=dict, + float=float, +@@ -328,8 +326,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + int=int, + isinstance=isinstance, + list=list, +- long=long, +- str=str, ++ long=int, + tuple=tuple, + ): + +@@ -357,7 +354,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + first = False + else: + buf = separator +- if isinstance(value, basestring): ++ if isinstance(value, str): + yield buf + _encoder(value) + elif value is None: + yield buf + 'null' +@@ -365,7 +362,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield buf + 'true' + elif value is False: + yield buf + 'false' +- elif isinstance(value, (int, long)): ++ elif isinstance(value, int): + yield buf + str(value) + elif isinstance(value, float): + yield buf + _floatstr(value) +@@ -408,12 +405,12 @@ def _make_iterencode(markers, _default, _encoder, _ind + item_separator = _item_separator + first = True + if _sort_keys: +- items = dct.items() ++ items = list(dct.items()) + 
items.sort(key=lambda kv: kv[0]) + else: +- items = dct.iteritems() ++ items = iter(dct.items()) + for key, value in items: +- if isinstance(key, basestring): ++ if isinstance(key, str): + pass + # JavaScript is weakly typed for these, so it makes sense to + # also allow them. Many encoders seem to do something like this. +@@ -425,7 +422,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + key = 'false' + elif key is None: + key = 'null' +- elif isinstance(key, (int, long)): ++ elif isinstance(key, int): + key = str(key) + elif _skipkeys: + continue +@@ -437,7 +434,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield item_separator + yield _encoder(key) + yield _key_separator +- if isinstance(value, basestring): ++ if isinstance(value, str): + yield _encoder(value) + elif value is None: + yield 'null' +@@ -445,7 +442,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield 'true' + elif value is False: + yield 'false' +- elif isinstance(value, (int, long)): ++ elif isinstance(value, int): + yield str(value) + elif isinstance(value, float): + yield _floatstr(value) +@@ -468,7 +465,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + del markers[markerid] + + def _iterencode(o, _current_indent_level): +- if isinstance(o, basestring): ++ if isinstance(o, str): + yield _encoder(o) + elif o is None: + yield 'null' +@@ -476,7 +473,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield 'true' + elif o is False: + yield 'false' +- elif isinstance(o, (int, long)): ++ elif isinstance(o, int): + yield str(o) + elif isinstance(o, float): + yield _floatstr(o) diff --git a/devel/py-optik/files/patch-2to3 b/devel/py-optik/files/patch-2to3 new file mode 100644 index 00000000000..1103787507b --- /dev/null +++ b/devel/py-optik/files/patch-2to3 @@ -0,0 +1,352 @@ +--- lib/help.py.orig 2006-07-23 15:21:30 UTC ++++ lib/help.py +@@ -8,7 +8,7 @@ to generate formatted help text. 
+ # See the README.txt distributed with Optik for licensing terms. + + import os +-import textwrap ++from . import textwrap + from optik.option import NO_DEFAULT + from optik.errors import gettext as _ + +@@ -110,10 +110,10 @@ class HelpFormatter: + self.level -= 1 + + def format_usage(self, usage): +- raise NotImplementedError, "subclasses must implement" ++ raise NotImplementedError("subclasses must implement") + + def format_heading(self, heading): +- raise NotImplementedError, "subclasses must implement" ++ raise NotImplementedError("subclasses must implement") + + def _format_text(self, text): + """ +--- lib/option.py.orig 2006-06-11 16:22:02 UTC ++++ lib/option.py +@@ -14,14 +14,8 @@ __revision__ = "$Id: option.py 522 2006-06-11 16:22:03 + + __all__ = ['Option'] + +-# Do the right thing with boolean values for all known Python versions. +-try: +- True, False +-except NameError: +- (True, False) = (1, 0) ++_idmax = 2 * sys.maxsize + 1 + +-_idmax = 2L * sys.maxint + 1 +- + def _repr(self): + return "<%s at 0x%x: %s>" % (self.__class__.__name__, + id(self) & _idmax, +@@ -44,7 +38,7 @@ def _parse_int(val): + return _parse_num(val, int) + + def _parse_long(val): +- return _parse_num(val, long) ++ return _parse_num(val, int) + + _builtin_cvt = { "int" : (_parse_int, _("integer")), + "long" : (_parse_long, _("long integer")), +@@ -211,7 +205,7 @@ class Option: + # Filter out None because early versions of Optik had exactly + # one short option and one long option, either of which + # could be None. 
+- opts = filter(None, opts) ++ opts = [_f for _f in opts if _f] + if not opts: + raise TypeError("at least one option string must be supplied") + return opts +@@ -239,7 +233,7 @@ class Option: + + def _set_attrs(self, attrs): + for attr in self.ATTRS: +- if attrs.has_key(attr): ++ if attr in attrs: + setattr(self, attr, attrs[attr]) + del attrs[attr] + else: +@@ -248,7 +242,7 @@ class Option: + else: + setattr(self, attr, None) + if attrs: +- attrs = attrs.keys() ++ attrs = list(attrs.keys()) + attrs.sort() + raise OptionError( + "invalid keyword arguments: %s" % ", ".join(attrs), +@@ -278,8 +272,8 @@ class Option: + # complicated check of __builtin__ is only necessary for + # Python 2.1 and earlier, and is short-circuited by the + # first check on modern Pythons.) +- import __builtin__ +- if ( type(self.type) is types.TypeType or ++ import builtins ++ if ( type(self.type) is type or + (hasattr(self.type, "__name__") and + getattr(__builtin__, self.type.__name__, None) is self.type) ): + self.type = self.type.__name__ +@@ -298,7 +292,7 @@ class Option: + if self.choices is None: + raise OptionError( + "must supply a list of choices for type 'choice'", self) +- elif type(self.choices) not in (types.TupleType, types.ListType): ++ elif type(self.choices) not in (tuple, list): + raise OptionError( + "choices must be a list of strings ('%s' supplied)" + % str(type(self.choices)).split("'")[1], self) +@@ -342,12 +336,12 @@ class Option: + raise OptionError( + "callback not callable: %r" % self.callback, self) + if (self.callback_args is not None and +- type(self.callback_args) is not types.TupleType): ++ type(self.callback_args) is not tuple): + raise OptionError( + "callback_args, if supplied, must be a tuple: not %r" + % self.callback_args, self) + if (self.callback_kwargs is not None and +- type(self.callback_kwargs) is not types.DictType): ++ type(self.callback_kwargs) is not dict): + raise OptionError( + "callback_kwargs, if supplied, must be a dict: not %r" + % 
self.callback_kwargs, self) +@@ -444,7 +438,7 @@ class Option: + parser.print_version() + parser.exit() + else: +- raise RuntimeError, "unknown action %r" % self.action ++ raise RuntimeError("unknown action %r" % self.action) + + return 1 + +--- lib/option_parser.py.orig 2006-07-23 15:21:30 UTC ++++ lib/option_parser.py +@@ -23,20 +23,14 @@ __all__ = ['SUPPRESS_HELP', 'SUPPRESS_USAGE', + SUPPRESS_HELP = "SUPPRESS"+"HELP" + SUPPRESS_USAGE = "SUPPRESS"+"USAGE" + +-# For compatibility with Python 2.2 +-try: +- True, False +-except NameError: +- (True, False) = (1, 0) +- + def isbasestring(x): +- return isinstance(x, types.StringType) or isinstance(x, types.UnicodeType) ++ return isinstance(x, bytes) or isinstance(x, str) + + class Values: + + def __init__(self, defaults=None): + if defaults: +- for (attr, val) in defaults.items(): ++ for (attr, val) in list(defaults.items()): + setattr(self, attr, val) + + def __str__(self): +@@ -47,7 +41,7 @@ class Values: + def __cmp__(self, other): + if isinstance(other, Values): + return cmp(self.__dict__, other.__dict__) +- elif isinstance(other, types.DictType): ++ elif isinstance(other, dict): + return cmp(self.__dict__, other) + else: + return -1 +@@ -60,7 +54,7 @@ class Values: + are silently ignored. 
+ """ + for attr in dir(self): +- if dict.has_key(attr): ++ if attr in dict: + dval = dict[attr] + if dval is not None: + setattr(self, attr, dval) +@@ -79,7 +73,7 @@ class Values: + elif mode == "loose": + self._update_loose(dict) + else: +- raise ValueError, "invalid update mode: %r" % mode ++ raise ValueError("invalid update mode: %r" % mode) + + def read_module(self, modname, mode="careful"): + __import__(modname) +@@ -88,7 +82,7 @@ class Values: + + def read_file(self, filename, mode="careful"): + vars = {} +- execfile(filename, vars) ++ exec(compile(open(filename, "rb").read(), filename, 'exec'), vars) + self._update(vars, mode) + + def ensure_value(self, attr, value): +@@ -158,7 +152,7 @@ class OptionContainer: + + def set_conflict_handler(self, handler): + if handler not in ("error", "resolve"): +- raise ValueError, "invalid conflict_resolution value %r" % handler ++ raise ValueError("invalid conflict_resolution value %r" % handler) + self.conflict_handler = handler + + def set_description(self, description): +@@ -180,10 +174,10 @@ class OptionContainer: + def _check_conflict(self, option): + conflict_opts = [] + for opt in option._short_opts: +- if self._short_opt.has_key(opt): ++ if opt in self._short_opt: + conflict_opts.append((opt, self._short_opt[opt])) + for opt in option._long_opts: +- if self._long_opt.has_key(opt): ++ if opt in self._long_opt: + conflict_opts.append((opt, self._long_opt[opt])) + + if conflict_opts: +@@ -208,14 +202,14 @@ class OptionContainer: + """add_option(Option) + add_option(opt_str, ..., kwarg=val, ...) 
+ """ +- if type(args[0]) is types.StringType: ++ if type(args[0]) is bytes: + option = self.option_class(*args, **kwargs) + elif len(args) == 1 and not kwargs: + option = args[0] + if not isinstance(option, Option): +- raise TypeError, "not an Option instance: %r" % option ++ raise TypeError("not an Option instance: %r" % option) + else: +- raise TypeError, "invalid arguments" ++ raise TypeError("invalid arguments") + + self._check_conflict(option) + +@@ -229,7 +223,7 @@ class OptionContainer: + if option.dest is not None: # option has a dest, we need a default + if option.default is not NO_DEFAULT: + self.defaults[option.dest] = option.default +- elif not self.defaults.has_key(option.dest): ++ elif option.dest not in self.defaults: + self.defaults[option.dest] = None + + return option +@@ -245,8 +239,8 @@ class OptionContainer: + self._long_opt.get(opt_str)) + + def has_option(self, opt_str): +- return (self._short_opt.has_key(opt_str) or +- self._long_opt.has_key(opt_str)) ++ return (opt_str in self._short_opt or ++ opt_str in self._long_opt) + + def remove_option(self, opt_str): + option = self._short_opt.get(opt_str) +@@ -519,16 +513,16 @@ class OptionParser (OptionContainer): + + def add_option_group(self, *args, **kwargs): + # XXX lots of overlap with OptionContainer.add_option() +- if type(args[0]) is types.StringType: ++ if type(args[0]) is bytes: + group = OptionGroup(self, *args, **kwargs) + elif len(args) == 1 and not kwargs: + group = args[0] + if not isinstance(group, OptionGroup): +- raise TypeError, "not an OptionGroup instance: %r" % group ++ raise TypeError("not an OptionGroup instance: %r" % group) + if group.parser is not self: +- raise ValueError, "invalid OptionGroup (wrong parser)" ++ raise ValueError("invalid OptionGroup (wrong parser)") + else: +- raise TypeError, "invalid arguments" ++ raise TypeError("invalid arguments") + + self.option_groups.append(group) + return group +@@ -582,7 +576,7 @@ class OptionParser (OptionContainer): + + try: 
+ stop = self._process_args(largs, rargs, values) +- except (BadOptionError, OptionValueError), err: ++ except (BadOptionError, OptionValueError) as err: + self.error(str(err)) + + args = largs + rargs +@@ -784,7 +778,7 @@ class OptionParser (OptionContainer): + or not defined. + """ + if self.usage: +- print >>file, self.get_usage() ++ print(self.get_usage(), file=file) + + def get_version(self): + if self.version: +@@ -801,7 +795,7 @@ class OptionParser (OptionContainer): + name. Does nothing if self.version is empty or undefined. + """ + if self.version: +- print >>file, self.get_version() ++ print(self.get_version(), file=file) + + def format_option_help(self, formatter=None): + if formatter is None: +@@ -864,11 +858,11 @@ def _match_abbrev(s, wordmap): + 'words', raise BadOptionError. + """ + # Is there an exact match? +- if wordmap.has_key(s): ++ if s in wordmap: + return s + else: + # Isolate all words with s as a prefix. +- possibilities = [word for word in wordmap.keys() ++ possibilities = [word for word in list(wordmap.keys()) + if word.startswith(s)] + # No exact match, so there had better be just one possibility. + if len(possibilities) == 1: +--- lib/textwrap.py.orig 2006-04-20 01:06:35 UTC ++++ lib/textwrap.py +@@ -10,14 +10,6 @@ __revision__ = "$Id: textwrap.py 39169 2005-07-15 06:5 + import string, re + import types + +-# Do the right thing with boolean values for all known Python versions +-# (so this module can be copied to projects that don't depend on Python +-# 2.3, e.g. Optik and Docutils). 
+-try: +- True, False +-except NameError: +- (True, False) = (1, 0) +- + __all__ = ['TextWrapper', 'wrap', 'fill'] + + # Hardcode the recognized whitespace characters to the US-ASCII +@@ -69,7 +61,7 @@ class TextWrapper: + whitespace_trans = string.maketrans(_whitespace, ' ' * len(_whitespace)) + + unicode_whitespace_trans = {} +- uspace = ord(u' ') ++ uspace = ord(' ') + for x in map(ord, _whitespace): + unicode_whitespace_trans[x] = uspace + +@@ -122,9 +114,9 @@ class TextWrapper: + if self.expand_tabs: + text = text.expandtabs() + if self.replace_whitespace: +- if isinstance(text, types.StringType): ++ if isinstance(text, bytes): + text = text.translate(self.whitespace_trans) +- elif isinstance(text, types.UnicodeType): ++ elif isinstance(text, str): + text = text.translate(self.unicode_whitespace_trans) + return text + +@@ -141,7 +133,7 @@ class TextWrapper: + 'use', ' ', 'the', ' ', '-b', ' ', 'option!' + """ + chunks = self.wordsep_re.split(text) +- chunks = filter(None, chunks) ++ chunks = [_f for _f in chunks if _f] + return chunks + + def _fix_sentence_endings(self, chunks): diff --git a/devel/py-ordereddict/Makefile b/devel/py-ordereddict/Makefile index 25f96faa285..62cb5145583 100644 --- a/devel/py-ordereddict/Makefile +++ b/devel/py-ordereddict/Makefile @@ -16,4 +16,6 @@ LICENSE_FILE= ${WRKSRC}/LICENSE USES= python:3.6+ USE_PYTHON= distutils autoplist +NO_ARCH= yes + .include diff --git a/devel/py-os-brick/Makefile b/devel/py-os-brick/Makefile index fc473477b04..15798bc90a8 100644 --- a/devel/py-os-brick/Makefile +++ b/devel/py-os-brick/Makefile @@ -23,7 +23,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}eventlet>=0.30.1:net/py-eventlet@${PY_FLAVOR ${PYTHON_PKGNAMEPREFIX}oslo.serialization>=4.2.0:devel/py-oslo.serialization@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}oslo.service>=2.8.0:devel/py-oslo.service@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}oslo.utils>=4.12.1:devel/py-oslo.utils@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pbr>=5.5.0:devel/py-pbr@${PY_FLAVOR} \ + 
${PYTHON_PKGNAMEPREFIX}pbr>=5.8.0:devel/py-pbr@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}requests>=2.25.1:www/py-requests@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}tenacity>=6.3.1:devel/py-tenacity@${PY_FLAVOR} diff --git a/devel/py-os-brick/files/patch-requirements.txt b/devel/py-os-brick/files/patch-requirements.txt deleted file mode 100644 index 3082f781e17..00000000000 --- a/devel/py-os-brick/files/patch-requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ ---- requirements.txt.orig 2022-02-18 15:37:53 UTC -+++ requirements.txt -@@ -2,7 +2,7 @@ - # of appearance. Changing the order has an impact on the overall integration - # process, which may cause wedges in the gate later. - --pbr>=5.8.0 # Apache-2.0 -+pbr>=5.5.0 # Apache-2.0 - eventlet>=0.30.1,!=0.32.0 # MIT - oslo.concurrency>=4.5.0 # Apache-2.0 - oslo.context>=3.4.0 # Apache-2.0 diff --git a/devel/py-pastel/Makefile b/devel/py-pastel/Makefile index 1ab6e5c846b..9331f83f8f2 100644 --- a/devel/py-pastel/Makefile +++ b/devel/py-pastel/Makefile @@ -2,6 +2,7 @@ PORTNAME= pastel PORTVERSION= 0.2.1 +PORTREVISION= 1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,9 +13,14 @@ COMMENT= Bring colors to your terminal LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0,1:devel/py-pytest@${PY_FLAVOR} + USES= python:3.6+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +do-test: + cd ${WRKSRC} && ${PYTHON_CMD} -m pytest -rs -v + .include diff --git a/devel/py-pastel/files/patch-setup.py b/devel/py-pastel/files/patch-setup.py new file mode 100644 index 00000000000..88a2a4ab102 --- /dev/null +++ b/devel/py-pastel/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2020-09-16 19:19:42 UTC ++++ setup.py +@@ -2,7 +2,7 @@ + from setuptools import setup + + packages = \ +-['pastel', 'tests'] ++['pastel'] + + package_data = \ + {'': ['*']} diff --git a/devel/py-path/Makefile b/devel/py-path/Makefile index bc39116fb24..893a3a42a54 100644 --- 
a/devel/py-path/Makefile +++ b/devel/py-path/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= path -PORTVERSION= 16.3.0 +PORTVERSION= 16.4.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -19,4 +19,7 @@ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +post-patch: + @${CP} ${FILESDIR}/setup.py ${WRKSRC}/ + .include diff --git a/devel/py-path/distinfo b/devel/py-path/distinfo index f4750b3b110..6ce650f90c3 100644 --- a/devel/py-path/distinfo +++ b/devel/py-path/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642102291 -SHA256 (path-16.3.0.tar.gz) = 5d8c2063ffade0766e0aa04255f8c0a0fd0eda5fdfb190375871b2f2b2e4afd7 -SIZE (path-16.3.0.tar.gz) = 45183 +TIMESTAMP = 1647264554 +SHA256 (path-16.4.0.tar.gz) = baf2e757c4b19be8208f9e67e48fb475b4a577d5613590ce46693bdbdf082f52 +SIZE (path-16.4.0.tar.gz) = 48289 diff --git a/devel/py-path/files/setup.py b/devel/py-path/files/setup.py new file mode 100644 index 00000000000..606849326a4 --- /dev/null +++ b/devel/py-path/files/setup.py @@ -0,0 +1,3 @@ +from setuptools import setup + +setup() diff --git a/devel/py-pdm-pep517/Makefile b/devel/py-pdm-pep517/Makefile index d62b41fafbf..b53c611b7f5 100644 --- a/devel/py-pdm-pep517/Makefile +++ b/devel/py-pdm-pep517/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pdm-pep517 -PORTVERSION= 0.11.2 +PORTVERSION= 0.12.1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-pdm-pep517/distinfo b/devel/py-pdm-pep517/distinfo index 085a1b83b42..ff4bc35485a 100644 --- a/devel/py-pdm-pep517/distinfo +++ b/devel/py-pdm-pep517/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058010 -SHA256 (pdm-pep517-0.11.2.tar.gz) = e000bab43502c191d71808a2630dd44ece301a319d26e002a1caea3a7307cd20 -SIZE (pdm-pep517-0.11.2.tar.gz) = 200657 +TIMESTAMP = 1647264560 +SHA256 (pdm-pep517-0.12.1.tar.gz) = c3f9acfdc7832635628e94235320e0f6c19cbcd926eb041c454fb12463bc7504 +SIZE (pdm-pep517-0.12.1.tar.gz) 
= 291776 diff --git a/devel/py-pdm-pep517/files/setup.py b/devel/py-pdm-pep517/files/setup.py index 70bc8a6693d..ce232e0b752 100644 --- a/devel/py-pdm-pep517/files/setup.py +++ b/devel/py-pdm-pep517/files/setup.py @@ -20,7 +20,10 @@ setup_kwargs = { 'packages': [ 'pdm.pep517', 'pdm.pep517._vendor', + 'pdm.pep517._vendor.boolean', 'pdm.pep517._vendor.cerberus', + 'pdm.pep517._vendor.license_expression', + 'pdm.pep517._vendor.license_expression.data', 'pdm.pep517._vendor.packaging', 'pdm.pep517._vendor.pyparsing', 'pdm.pep517._vendor.pyparsing.diagram', @@ -32,7 +35,6 @@ setup_kwargs = { 'keywords': ['packaging', 'PEP 517', 'build'], 'classifiers': [ 'Development Status :: 3 - Alpha', - 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', diff --git a/devel/py-pdm/Makefile b/devel/py-pdm/Makefile index 0d105670656..9ddfbb0836a 100644 --- a/devel/py-pdm/Makefile +++ b/devel/py-pdm/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pdm -PORTVERSION= 1.13.3 +PORTVERSION= 1.13.4 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -17,7 +17,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}blinker>=0:devel/py-blinker@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}findpython>=0:devel/py-findpython@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}installer03>=0.3.0<0.4:devel/py-installer03@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}packaging>=0:devel/py-packaging@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pdm-pep517>=0.9<0.12:devel/py-pdm-pep517@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pdm-pep517>=0.9<1:devel/py-pdm-pep517@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pep517>=0.11.0:devel/py-pep517@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}platformdirs>=0:devel/py-platformdirs@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}python-dotenv>=0.15:www/py-python-dotenv@${PY_FLAVOR} \ diff --git a/devel/py-pdm/distinfo b/devel/py-pdm/distinfo index ed0ed7f3844..c5d63c6bb4c 
100644 --- a/devel/py-pdm/distinfo +++ b/devel/py-pdm/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058008 -SHA256 (pdm-1.13.3.tar.gz) = e7e6e38e58e693700768356363336e0bb96492f9692daf7f82b2a07661110bb9 -SIZE (pdm-1.13.3.tar.gz) = 3129230 +TIMESTAMP = 1647264558 +SHA256 (pdm-1.13.4.tar.gz) = 5644fec425d1c0af04f135ad05b15b5a87be5a42e07f64e1706e3aac5dc89fcb +SIZE (pdm-1.13.4.tar.gz) = 3249658 diff --git a/devel/py-pdm/files/setup.py b/devel/py-pdm/files/setup.py index aa0a877e3e2..779e411dbb6 100644 --- a/devel/py-pdm/files/setup.py +++ b/devel/py-pdm/files/setup.py @@ -11,7 +11,7 @@ INSTALL_REQUIRE = [ 'importlib-metadata; python_version < "3.8"', 'installer>=0.3.0,<0.6', 'packaging', - 'pdm-pep517>=0.9,<0.12', + 'pdm-pep517>=0.9,<1', 'pep517>=0.11.0', 'platformdirs', 'python-dotenv>=0.15', diff --git a/devel/py-pip-api/Makefile b/devel/py-pip-api/Makefile index b2ff057a565..4a8f93172fe 100644 --- a/devel/py-pip-api/Makefile +++ b/devel/py-pip-api/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pip-api -PORTVERSION= 0.0.27 +PORTVERSION= 0.0.29 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-pip-api/distinfo b/devel/py-pip-api/distinfo index 8c4c9dcb136..a22be9eab63 100644 --- a/devel/py-pip-api/distinfo +++ b/devel/py-pip-api/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058012 -SHA256 (pip-api-0.0.27.tar.gz) = 354825f2fa89b9b1c56e943be32823c09590e13055a57af5590456ff7d6524bd -SIZE (pip-api-0.0.27.tar.gz) = 111482 +TIMESTAMP = 1647264562 +SHA256 (pip-api-0.0.29.tar.gz) = f701584eb1c3e01021c846f89d629ab9373b6624f0626757774ad54fc4c29571 +SIZE (pip-api-0.0.29.tar.gz) = 111361 diff --git a/devel/py-plex/files/patch-2to3 b/devel/py-plex/files/patch-2to3 new file mode 100644 index 00000000000..cad5deda331 --- /dev/null +++ b/devel/py-plex/files/patch-2to3 @@ -0,0 +1,517 @@ +--- Plex/Lexicons.py.orig 2003-07-08 08:35:31 UTC ++++ Plex/Lexicons.py +@@ -8,11 +8,11 @@ + + import types + +-import Actions 
+-import DFA +-import Errors +-import Machines +-import Regexps ++from . import Actions ++from . import DFA ++from . import Errors ++from . import Machines ++from . import Regexps + + # debug_flags for Lexicon constructor + DUMP_NFA = 1 +@@ -111,10 +111,10 @@ class Lexicon: + tables = None # StateTableMachine + + def __init__(self, specifications, debug = None, debug_flags = 7, timings = None): +- if type(specifications) <> types.ListType: ++ if type(specifications) != list: + raise Errors.InvalidScanner("Scanner definition is not a list") + if timings: +- from Timing import time ++ from .Timing import time + total_time = 0.0 + time1 = time() + nfa = Machines.Machine() +@@ -127,7 +127,7 @@ class Lexicon: + self.add_token_to_machine( + nfa, user_initial_state, token, token_number) + token_number = token_number + 1 +- elif type(spec) == types.TupleType: ++ elif type(spec) == tuple: + self.add_token_to_machine( + nfa, default_initial_state, spec, token_number) + token_number = token_number + 1 +@@ -172,13 +172,13 @@ class Lexicon: + re.build_machine(machine, initial_state, final_state, + match_bol = 1, nocase = 0) + final_state.set_action(action, priority = -token_number) +- except Errors.PlexError, e: ++ except Errors.PlexError as e: + raise e.__class__("Token number %d: %s" % (token_number, e)) + + def parse_token_definition(self, token_spec): +- if type(token_spec) <> types.TupleType: ++ if type(token_spec) != tuple: + raise Errors.InvalidToken("Token definition is not a tuple") +- if len(token_spec) <> 2: ++ if len(token_spec) != 2: + raise Errors.InvalidToken("Wrong number of items in token definition") + pattern, action = token_spec + if not isinstance(pattern, Regexps.RE): +--- Plex/Machines.py.orig 2003-07-08 08:35:31 UTC ++++ Plex/Machines.py +@@ -8,12 +8,12 @@ + + import string + import sys +-from sys import maxint ++from sys import maxsize + from types import TupleType + +-from Transitions import TransitionMap ++from .Transitions import TransitionMap + 
+-LOWEST_PRIORITY = -sys.maxint ++LOWEST_PRIORITY = -sys.maxsize + + class Machine: + """A collection of Nodes representing an NFA or DFA.""" +@@ -54,7 +54,7 @@ class Machine: + file.write("Plex.Machine:\n") + if self.initial_states is not None: + file.write(" Initial states:\n") +- for (name, state) in self.initial_states.items(): ++ for (name, state) in list(self.initial_states.items()): + file.write(" '%s': %d\n" % (name, state.number)) + for s in self.states: + s.dump(file) +@@ -150,13 +150,13 @@ class FastMachine: + for old_state in old_machine.states: + new_state = self.new_state() + old_to_new[old_state] = new_state +- for name, old_state in old_machine.initial_states.items(): ++ for name, old_state in list(old_machine.initial_states.items()): + initial_states[name] = old_to_new[old_state] + for old_state in old_machine.states: + new_state = old_to_new[old_state] +- for event, old_state_set in old_state.transitions.items(): ++ for event, old_state_set in list(old_state.transitions.items()): + if old_state_set: +- new_state[event] = old_to_new[old_state_set.keys()[0]] ++ new_state[event] = old_to_new[list(old_state_set.keys())[0]] + else: + new_state[event] = None + new_state['action'] = old_state.action +@@ -182,7 +182,7 @@ class FastMachine: + code0, code1 = event + if code0 == -maxint: + state['else'] = new_state +- elif code1 <> maxint: ++ elif code1 != maxint: + while code0 < code1: + state[chr(code0)] = new_state + code0 = code0 + 1 +@@ -195,7 +195,7 @@ class FastMachine: + def dump(self, file): + file.write("Plex.FastMachine:\n") + file.write(" Initial states:\n") +- for name, state in self.initial_states.items(): ++ for name, state in list(self.initial_states.items()): + file.write(" %s: %s\n" % (repr(name), state['number'])) + for state in self.states: + self.dump_state(state, file) +@@ -214,7 +214,7 @@ class FastMachine: + def dump_transitions(self, state, file): + chars_leading_to_state = {} + special_to_state = {} +- for (c, s) in state.items(): 
++ for (c, s) in list(state.items()): + if len(c) == 1: + chars = chars_leading_to_state.get(id(s), None) + if chars is None: +@@ -229,7 +229,7 @@ class FastMachine: + if char_list: + ranges = self.chars_to_ranges(char_list) + ranges_to_state[ranges] = state +- ranges_list = ranges_to_state.keys() ++ ranges_list = list(ranges_to_state.keys()) + ranges_list.sort() + for ranges in ranges_list: + key = self.ranges_to_string(ranges) +@@ -256,9 +256,10 @@ class FastMachine: + return tuple(result) + + def ranges_to_string(self, range_list): +- return string.join(map(self.range_to_string, range_list), ",") ++ return string.join(list(map(self.range_to_string, range_list)), ",") + +- def range_to_string(self, (c1, c2)): ++ def range_to_string(self, xxx_todo_changeme): ++ (c1, c2) = xxx_todo_changeme + if c1 == c2: + return repr(c1) + else: +--- Plex/Regexps.py.orig 2003-07-08 08:35:31 UTC ++++ Plex/Regexps.py +@@ -9,9 +9,9 @@ + import array + import string + import types +-from sys import maxint ++from sys import maxsize + +-import Errors ++from . import Errors + + # + # Constants +@@ -81,9 +81,9 @@ def CodeRanges(code_list): + an RE which will match a character in any of the ranges. + """ + re_list = [] +- for i in xrange(0, len(code_list), 2): ++ for i in range(0, len(code_list), 2): + re_list.append(CodeRange(code_list[i], code_list[i + 1])) +- return apply(Alt, tuple(re_list)) ++ return Alt(*tuple(re_list)) + + def CodeRange(code1, code2): + """ +@@ -152,12 +152,12 @@ class RE: + self.wrong_type(num, value, "Plex.RE instance") + + def check_string(self, num, value): +- if type(value) <> type(''): ++ if type(value) != type(''): + self.wrong_type(num, value, "string") + + def check_char(self, num, value): + self.check_string(num, value) +- if len(value) <> 1: ++ if len(value) != 1: + raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s." 
+ "Expected a string of length 1, got: %s" % ( + num, self.__class__.__name__, repr(value))) +@@ -294,7 +294,7 @@ class Seq(RE): + + def __init__(self, *re_list): + nullable = 1 +- for i in xrange(len(re_list)): ++ for i in range(len(re_list)): + re = re_list[i] + self.check_re(i, re) + nullable = nullable and re.nullable +@@ -319,7 +319,7 @@ class Seq(RE): + else: + s1 = initial_state + n = len(re_list) +- for i in xrange(n): ++ for i in range(n): + if i < n - 1: + s2 = m.new_state() + else: +@@ -330,7 +330,7 @@ class Seq(RE): + match_bol = re.match_nl or (match_bol and re.nullable) + + def calc_str(self): +- return "Seq(%s)" % string.join(map(str, self.re_list), ",") ++ return "Seq(%s)" % string.join(list(map(str, self.re_list)), ",") + + + class Alt(RE): +@@ -369,7 +369,7 @@ class Alt(RE): + re.build_machine(m, initial_state, final_state, 0, nocase) + + def calc_str(self): +- return "Alt(%s)" % string.join(map(str, self.re_list), ",") ++ return "Alt(%s)" % string.join(list(map(str, self.re_list)), ",") + + + class Rep1(RE): +@@ -437,7 +437,7 @@ def Str1(s): + """ + Str1(s) is an RE which matches the literal string |s|. 
+ """ +- result = apply(Seq, tuple(map(Char, s))) ++ result = Seq(*tuple(map(Char, s))) + result.str = "Str(%s)" % repr(s) + return result + +@@ -449,8 +449,8 @@ def Str(*strs): + if len(strs) == 1: + return Str1(strs[0]) + else: +- result = apply(Alt, tuple(map(Str1, strs))) +- result.str = "Str(%s)" % string.join(map(repr, strs), ",") ++ result = Alt(*tuple(map(Str1, strs))) ++ result.str = "Str(%s)" % string.join(list(map(repr, strs)), ",") + return result + + def Any(s): +@@ -495,7 +495,7 @@ def Range(s1, s2 = None): + ranges = [] + for i in range(0, len(s1), 2): + ranges.append(CodeRange(ord(s1[i]), ord(s1[i+1]) + 1)) +- result = apply(Alt, tuple(ranges)) ++ result = Alt(*tuple(ranges)) + result.str = "Range(%s)" % repr(s1) + return result + +--- Plex/Scanners.py.orig 2003-07-08 08:35:31 UTC ++++ Plex/Scanners.py +@@ -7,8 +7,8 @@ + # + #======================================================================= + +-import Errors +-from Regexps import BOL, EOL, EOF ++from . import Errors ++from .Regexps import BOL, EOL, EOF + + class Scanner: + """ +@@ -122,8 +122,8 @@ class Scanner: + action = self.run_machine_inlined() + if action: + if self.trace: +- print "Scanner: read: Performing", action, "%d:%d" % ( +- self.start_pos, self.cur_pos) ++ print("Scanner: read: Performing", action, "%d:%d" % ( ++ self.start_pos, self.cur_pos)) + base = self.buf_start_pos + text = self.buffer[self.start_pos - base : self.cur_pos - base] + return (text, action) +@@ -163,8 +163,8 @@ class Scanner: + trace = self.trace + while 1: + if trace: #TRACE# +- print "State %d, %d/%d:%s -->" % ( #TRACE# +- state['number'], input_state, cur_pos, repr(cur_char)), #TRACE# ++ print("State %d, %d/%d:%s -->" % ( #TRACE# ++ state['number'], input_state, cur_pos, repr(cur_char)), end=' ') #TRACE# + # Begin inlined self.save_for_backup() + #action = state.action #@slow + action = state['action'] #@fast +@@ -179,7 +179,7 @@ class Scanner: + new_state = c and state.get('else') #@fast + if new_state: + 
if trace: #TRACE# +- print "State %d" % new_state['number'] #TRACE# ++ print("State %d" % new_state['number']) #TRACE# + state = new_state + # Begin inlined: self.next_char() + if input_state == 1: +@@ -228,7 +228,7 @@ class Scanner: + # End inlined self.next_char() + else: # not new_state + if trace: #TRACE# +- print "blocked" #TRACE# ++ print("blocked") #TRACE# + # Begin inlined: action = self.back_up() + if backup_state: + (action, cur_pos, cur_line, cur_line_start, +@@ -245,7 +245,7 @@ class Scanner: + self.next_pos = next_pos + if trace: #TRACE# + if action: #TRACE# +- print "Doing", action #TRACE# ++ print("Doing", action) #TRACE# + return action + + # def transition(self): +@@ -288,7 +288,7 @@ class Scanner: + def next_char(self): + input_state = self.input_state + if self.trace: +- print "Scanner: next:", " "*20, "[%d] %d" % (input_state, self.cur_pos), ++ print("Scanner: next:", " "*20, "[%d] %d" % (input_state, self.cur_pos), end=' ') + if input_state == 1: + self.cur_pos = self.next_pos + c = self.read_char() +@@ -314,7 +314,7 @@ class Scanner: + else: # input_state = 5 + self.cur_char = '' + if self.trace: +- print "--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char)) ++ print("--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char))) + + # def read_char(self): + # """ +--- Plex/test_tm.py.orig 2003-07-08 08:35:31 UTC ++++ Plex/test_tm.py +@@ -4,14 +4,14 @@ sys.stderr = sys.stdout + from TransitionMaps import TransitionMap + + m = TransitionMap() +-print m ++print(m) + + def add(c, s): +- print +- print "adding", repr(c), "-->", repr(s) ++ print() ++ print("adding", repr(c), "-->", repr(s)) + m.add_transition(c, s) +- print m +- print "keys:", m.keys() ++ print(m) ++ print("keys:", list(m.keys())) + + add('a','alpha') + add('e', 'eta') +--- Plex/Traditional.py.orig 2003-07-08 08:35:31 UTC ++++ Plex/Traditional.py +@@ -6,8 +6,8 @@ + # + #======================================================================= + +-from Regexps 
import * +-from Errors import PlexError ++from .Regexps import * ++from .Errors import PlexError + + class RegexpSyntaxError(PlexError): + pass +@@ -25,7 +25,7 @@ class REParser: + self.s = s + self.i = -1 + self.end = 0 +- self.next() ++ next(self) + + def parse_re(self): + re = self.parse_alt() +@@ -39,9 +39,9 @@ class REParser: + if self.c == '|': + re_list = [re] + while self.c == '|': +- self.next() ++ next(self) + re_list.append(self.parse_seq()) +- re = apply(Alt, tuple(re_list)) ++ re = Alt(*tuple(re_list)) + return re + + def parse_seq(self): +@@ -49,7 +49,7 @@ class REParser: + re_list = [] + while not self.end and not self.c in "|)": + re_list.append(self.parse_mod()) +- return apply(Seq, tuple(re_list)) ++ return Seq(*tuple(re_list)) + + def parse_mod(self): + """Parse a primitive regexp followed by *, +, ? modifiers.""" +@@ -61,7 +61,7 @@ class REParser: + re = Rep1(re) + else: # self.c == '?' + re = Opt(re) +- self.next() ++ next(self) + return re + + def parse_prim(self): +@@ -91,16 +91,16 @@ class REParser: + invert = 0 + if self.c == '^': + invert = 1 +- self.next() ++ next(self) + if self.c == ']': + char_list.append(']') +- self.next() +- while not self.end and self.c <> ']': ++ next(self) ++ while not self.end and self.c != ']': + c1 = self.get() +- if self.c == '-' and self.lookahead(1) <> ']': +- self.next() ++ if self.c == '-' and self.lookahead(1) != ']': ++ next(self) + c2 = self.get() +- for a in xrange(ord(c1), ord(c2) + 1): ++ for a in range(ord(c1), ord(c2) + 1): + char_list.append(chr(a)) + else: + char_list.append(c1) +@@ -110,7 +110,7 @@ class REParser: + else: + return Any(chars) + +- def next(self): ++ def __next__(self): + """Advance to the next char.""" + s = self.s + i = self.i = self.i + 1 +@@ -124,7 +124,7 @@ class REParser: + if self.end: + self.error("Premature end of string") + c = self.c +- self.next() ++ next(self) + return c + + def lookahead(self, n): +@@ -141,7 +141,7 @@ class REParser: + Raises an exception otherwise. 
+ """ + if self.c == c: +- self.next() ++ next(self) + else: + self.error("Missing %s" % repr(c)) + +--- Plex/Transitions.py.orig 2007-01-27 02:58:25 UTC ++++ Plex/Transitions.py +@@ -7,7 +7,7 @@ + + from copy import copy + import string +-from sys import maxint ++from sys import maxsize + from types import TupleType + + class TransitionMap: +@@ -107,7 +107,7 @@ class TransitionMap: + result.append(((code0, code1), set)) + code0 = code1 + i = i + 2 +- for event, set in self.special.items(): ++ for event, set in list(self.special.items()): + if set: + result.append((event, set)) + return result +@@ -177,7 +177,7 @@ class TransitionMap: + map_strs.append(state_set_str(map[i])) + i = i + 1 + special_strs = {} +- for event, set in self.special.items(): ++ for event, set in list(self.special.items()): + special_strs[event] = state_set_str(set) + return "[%s]+%s" % ( + string.join(map_strs, ","), +@@ -189,7 +189,7 @@ class TransitionMap: + def check(self): + """Check data structure integrity.""" + if not self.map[-3] < self.map[-1]: +- print self ++ print(self) + assert 0 + + def dump(self, file): +@@ -199,7 +199,7 @@ class TransitionMap: + while i < n: + self.dump_range(map[i], map[i + 2], map[i + 1], file) + i = i + 2 +- for event, set in self.special.items(): ++ for event, set in list(self.special.items()): + if set: + if not event: + event = 'empty' +@@ -242,7 +242,7 @@ class TransitionMap: + # set1[state] = 1 + + def state_set_str(set): +- state_list = set.keys() ++ state_list = list(set.keys()) + str_list = [] + for state in state_list: + str_list.append("S%d" % state.number) diff --git a/devel/py-poetry-core/Makefile b/devel/py-poetry-core/Makefile index 9e953c7e7ed..e43b730ee21 100644 --- a/devel/py-poetry-core/Makefile +++ b/devel/py-poetry-core/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= poetry-core -PORTVERSION= 1.0.7 +PORTVERSION= 1.0.8 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git 
a/devel/py-poetry-core/distinfo b/devel/py-poetry-core/distinfo index 56ef64e1adb..92099832c7c 100644 --- a/devel/py-poetry-core/distinfo +++ b/devel/py-poetry-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634111768 -SHA256 (poetry-core-1.0.7.tar.gz) = 98c11c755a16ef6c5673c22ca94a3802a7df4746a0853a70b6fae8b9f5cac206 -SIZE (poetry-core-1.0.7.tar.gz) = 346244 +TIMESTAMP = 1647264564 +SHA256 (poetry-core-1.0.8.tar.gz) = 951fc7c1f8d710a94cb49019ee3742125039fc659675912ea614ac2aa405b118 +SIZE (poetry-core-1.0.8.tar.gz) = 346260 diff --git a/devel/py-prettytable/Makefile b/devel/py-prettytable/Makefile index aeb26849310..1ba2f9e9474 100644 --- a/devel/py-prettytable/Makefile +++ b/devel/py-prettytable/Makefile @@ -1,7 +1,7 @@ # Created by: Roman Bogorodskiy PORTNAME= prettytable -PORTVERSION= 3.1.1 +PORTVERSION= 3.2.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-prettytable/distinfo b/devel/py-prettytable/distinfo index cbdc3d87e5f..dc184e12481 100644 --- a/devel/py-prettytable/distinfo +++ b/devel/py-prettytable/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058016 -SHA256 (prettytable-3.1.1.tar.gz) = 43c9e23272ca253d038ae76fe3adde89794e92e7fcab2ddf5b94b38642ef4f21 -SIZE (prettytable-3.1.1.tar.gz) = 52705 +TIMESTAMP = 1647264566 +SHA256 (prettytable-3.2.0.tar.gz) = ae7d96c64100543dc61662b40a28f3b03c0f94a503ed121c6fca2782c5816f81 +SIZE (prettytable-3.2.0.tar.gz) = 52976 diff --git a/devel/py-proto-plus/Makefile b/devel/py-proto-plus/Makefile index e22eab75a65..d144352ccb3 100644 --- a/devel/py-proto-plus/Makefile +++ b/devel/py-proto-plus/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= proto-plus -PORTVERSION= 1.19.9 +PORTVERSION= 1.20.3 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-proto-plus/distinfo b/devel/py-proto-plus/distinfo index 372c0081c69..52cade9d6fd 100644 --- a/devel/py-proto-plus/distinfo +++ 
b/devel/py-proto-plus/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971084 -SHA256 (proto-plus-1.19.9.tar.gz) = 4ca4055f7c5c1a2239ac7a12770a76a16269f58d3f01631523c20fc81dbb14a7 -SIZE (proto-plus-1.19.9.tar.gz) = 49652 +TIMESTAMP = 1647264568 +SHA256 (proto-plus-1.20.3.tar.gz) = f28b225bc9e6c14e206fb7f8e996a46fb2ccd902648e512d496abb6a716a4ae5 +SIZE (proto-plus-1.20.3.tar.gz) = 51697 diff --git a/devel/py-pycalendar/files/patch-2to3 b/devel/py-pycalendar/files/patch-2to3 new file mode 100644 index 00000000000..c16832c7b37 --- /dev/null +++ b/devel/py-pycalendar/files/patch-2to3 @@ -0,0 +1,48 @@ +--- src/pycalendar/datetime.py.orig 2011-05-24 16:51:10 UTC ++++ src/pycalendar/datetime.py +@@ -20,7 +20,7 @@ from pycalendar import utils + from pycalendar.duration import PyCalendarDuration + from pycalendar.timezone import PyCalendarTimezone + from pycalendar.valueutils import ValueMixin +-import cStringIO as StringIO ++import io as StringIO + import time + + class PyCalendarDateTime(ValueMixin): +@@ -254,13 +254,13 @@ class PyCalendarDateTime(ValueMixin): + # Look for cached value (or floating time which has to be calculated + # each time) + if ( not self.mPosixTimeCached ) or self.floating(): +- result = 0L ++ result = 0 + + # Add hour/mins/secs +- result = ( self.mHours * 60L + self.mMinutes ) * 60L + self.mSeconds ++ result = ( self.mHours * 60 + self.mMinutes ) * 60 + self.mSeconds + + # Number of days since 1970 +- result += self.daysSince1970() * 24L * 60L * 60L ++ result += self.daysSince1970() * 24 * 60 * 60 + + # Adjust for timezone offset + result -= self.timeZoneSecondsOffset() +--- src/pycalendar/vcard/card.py.orig 2011-05-24 16:51:10 UTC ++++ src/pycalendar/vcard/card.py +@@ -14,7 +14,7 @@ + # limitations under the License. 
+ ## + +-from cStringIO import StringIO ++from io import StringIO + from pycalendar.componentbase import PyCalendarComponentBase + from pycalendar.exceptions import PyCalendarInvalidData + from pycalendar.parser import ParserContext +@@ -214,7 +214,7 @@ class Card(PyCalendarComponentBase): + else: + self.addProperty(prop) + except IndexError: +- print line ++ print(line) + + # Check for truncated data + if state != LOOK_FOR_VCARD: diff --git a/devel/py-pydevd/files/patch-2to3 b/devel/py-pydevd/files/patch-2to3 new file mode 100644 index 00000000000..9b14d28c4aa --- /dev/null +++ b/devel/py-pydevd/files/patch-2to3 @@ -0,0 +1,827 @@ +--- _pydev_imps/_pydev_SimpleXMLRPCServer.py.orig 2017-03-14 15:21:50 UTC ++++ _pydev_imps/_pydev_SimpleXMLRPCServer.py +@@ -155,7 +155,7 @@ def remove_duplicates(lst): + for x in lst: + u[x] = 1 + +- return u.keys() ++ return list(u.keys()) + + class SimpleXMLRPCDispatcher: + """Mix-in class that dispatches XML-RPC requests. +@@ -260,13 +260,13 @@ class SimpleXMLRPCDispatcher: + response = (response,) + response = xmlrpclib.dumps(response, methodresponse=1, + allow_none=self.allow_none, encoding=self.encoding) +- except Fault, fault: ++ except Fault as fault: + response = xmlrpclib.dumps(fault, allow_none=self.allow_none, + encoding=self.encoding) + except: + # report exception back to server + response = xmlrpclib.dumps( +- xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value)), #@UndefinedVariable exc_value only available when we actually have an exception ++ xmlrpclib.Fault(1, "%s:%s" % (sys.exc_info()[0], sys.exc_info()[1])), #@UndefinedVariable exc_value only available when we actually have an exception + encoding=self.encoding, allow_none=self.allow_none, + ) + +@@ -277,7 +277,7 @@ class SimpleXMLRPCDispatcher: + + Returns a list of the methods supported by the server.""" + +- methods = self.funcs.keys() ++ methods = list(self.funcs.keys()) + if self.instance is not None: + # Instance can implement _listMethod to return a 
list of + # methods +@@ -314,7 +314,7 @@ class SimpleXMLRPCDispatcher: + Returns a string containing documentation for the specified method.""" + + method = None +- if self.funcs.has_key(method_name): ++ if method_name in self.funcs: + method = self.funcs[method_name] + elif self.instance is not None: + # Instance can implement _methodHelp to return help for a method +@@ -363,7 +363,7 @@ class SimpleXMLRPCDispatcher: + # XXX A marshalling error in any response will fail the entire + # multicall. If someone cares they should fix this. + results.append([self._dispatch(method_name, params)]) +- except Fault, fault: ++ except Fault as fault: + results.append( + {'faultCode' : fault.faultCode, + 'faultString' : fault.faultString} +@@ -371,7 +371,7 @@ class SimpleXMLRPCDispatcher: + except: + results.append( + {'faultCode' : 1, +- 'faultString' : "%s:%s" % (sys.exc_type, sys.exc_value)} #@UndefinedVariable exc_value only available when we actually have an exception ++ 'faultString' : "%s:%s" % (sys.exc_info()[0], sys.exc_info()[1])} #@UndefinedVariable exc_value only available when we actually have an exception + ) + return results + +--- _pydev_imps/_pydev_SocketServer.py.orig 2017-02-06 17:09:10 UTC ++++ _pydev_imps/_pydev_SocketServer.py +@@ -336,12 +336,12 @@ class BaseServer: + The default is to print a traceback and continue. + + """ +- print '-'*40 +- print 'Exception happened during processing of request from', +- print client_address ++ print('-'*40) ++ print('Exception happened during processing of request from', end=' ') ++ print(client_address) + import traceback + traceback.print_exc() # XXX But this goes to stderr! +- print '-'*40 ++ print('-'*40) + + + class TCPServer(BaseServer): +@@ -528,7 +528,7 @@ class ForkingMixIn: + if not pid: continue + try: + self.active_children.remove(pid) +- except ValueError, e: ++ except ValueError as e: + raise ValueError('%s. 
x=%d and list=%r' % (e.message, pid, + self.active_children)) + +@@ -704,9 +704,9 @@ class DatagramRequestHandler(BaseRequestHandler): + + def setup(self): + try: +- from cStringIO import StringIO ++ from io import StringIO + except ImportError: +- from StringIO import StringIO ++ from io import StringIO + self.packet, self.socket = self.request + self.rfile = StringIO(self.packet) + self.wfile = StringIO() +--- _pydev_imps/_pydev_inspect.py.orig 2016-04-26 16:50:14 UTC ++++ _pydev_imps/_pydev_inspect.py +@@ -44,7 +44,7 @@ def isclass(object): + Class objects provide these attributes: + __doc__ documentation string + __module__ name of module in which this class was defined""" +- return isinstance(object, types.ClassType) or hasattr(object, '__bases__') ++ return isinstance(object, type) or hasattr(object, '__bases__') + + def ismethod(object): + """Return true if the object is an instance method. +@@ -267,7 +267,7 @@ def getdoc(object): + doc = object.__doc__ + except AttributeError: + return None +- if not isinstance(doc, (str, unicode)): ++ if not isinstance(doc, str): + return None + try: + lines = string.split(string.expandtabs(doc), '\n') +@@ -290,30 +290,29 @@ def getfile(object): + if ismodule(object): + if hasattr(object, '__file__'): + return object.__file__ +- raise TypeError, 'arg is a built-in module' ++ raise TypeError('arg is a built-in module') + if isclass(object): + object = sys.modules.get(object.__module__) + if hasattr(object, '__file__'): + return object.__file__ +- raise TypeError, 'arg is a built-in class' ++ raise TypeError('arg is a built-in class') + if ismethod(object): +- object = object.im_func ++ object = object.__func__ + if isfunction(object): +- object = object.func_code ++ object = object.__code__ + if istraceback(object): + object = object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + return object.co_filename +- raise TypeError, 'arg is not a module, class, method, ' \ +- 'function, traceback, 
frame, or code object' ++ raise TypeError('arg is not a module, class, method, ' \ ++ 'function, traceback, frame, or code object') + + def getmoduleinfo(path): + """Get the module name, suffix, mode, and module type for a given file.""" + filename = os.path.basename(path) +- suffixes = map(lambda (suffix, mode, mtype): +- (-len(suffix), suffix, mode, mtype), imp.get_suffixes()) ++ suffixes = [(-len(suffix_mode_mtype[0]), suffix_mode_mtype[0], suffix_mode_mtype[1], suffix_mode_mtype[2]) for suffix_mode_mtype in imp.get_suffixes()] + suffixes.sort() # try longest suffixes first, in case they overlap + for neglen, suffix, mode, mtype in suffixes: + if filename[neglen:] == suffix: +@@ -356,12 +355,12 @@ def getmodule(object): + file = getabsfile(object) + except TypeError: + return None +- if modulesbyfile.has_key(file): ++ if file in modulesbyfile: + return sys.modules[modulesbyfile[file]] +- for module in sys.modules.values(): ++ for module in list(sys.modules.values()): + if hasattr(module, '__file__'): + modulesbyfile[getabsfile(module)] = module.__name__ +- if modulesbyfile.has_key(file): ++ if file in modulesbyfile: + return sys.modules[modulesbyfile[file]] + main = sys.modules['__main__'] + if hasattr(main, object.__name__): +@@ -384,7 +383,7 @@ def findsource(object): + try: + file = open(getsourcefile(object)) + except (TypeError, IOError): +- raise IOError, 'could not get source code' ++ raise IOError('could not get source code') + lines = file.readlines() + file.close() + +@@ -396,26 +395,26 @@ def findsource(object): + pat = re.compile(r'^\s*class\s*' + name + r'\b') + for i in range(len(lines)): + if pat.match(lines[i]): return lines, i +- else: raise IOError, 'could not find class definition' ++ else: raise IOError('could not find class definition') + + if ismethod(object): +- object = object.im_func ++ object = object.__func__ + if isfunction(object): +- object = object.func_code ++ object = object.__code__ + if istraceback(object): + object = 
object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + if not hasattr(object, 'co_firstlineno'): +- raise IOError, 'could not find function definition' ++ raise IOError('could not find function definition') + lnum = object.co_firstlineno - 1 + pat = re.compile(r'^(\s*def\s)|(.*\slambda(:|\s))') + while lnum > 0: + if pat.match(lines[lnum]): break + lnum = lnum - 1 + return lines, lnum +- raise IOError, 'could not find code object' ++ raise IOError('could not find code object') + + def getcomments(object): + """Get lines of comments immediately preceding an object's source code.""" +@@ -479,7 +478,9 @@ class BlockFinder: + self.started = 0 + self.last = 0 + +- def tokeneater(self, type, token, (srow, scol), (erow, ecol), line): ++ def tokeneater(self, type, token, xxx_todo_changeme, xxx_todo_changeme1, line): ++ (srow, scol) = xxx_todo_changeme ++ (erow, ecol) = xxx_todo_changeme1 + if not self.started: + if type == tokenize.NAME: self.started = 1 + elif type == tokenize.NEWLINE: +@@ -488,15 +489,15 @@ class BlockFinder: + self.indent = self.indent + 1 + elif type == tokenize.DEDENT: + self.indent = self.indent - 1 +- if self.indent == 0: raise EndOfBlock, self.last ++ if self.indent == 0: raise EndOfBlock(self.last) + elif type == tokenize.NAME and scol == 0: +- raise EndOfBlock, self.last ++ raise EndOfBlock(self.last) + + def getblock(lines): + """Extract the block of code at the top of the given list of lines.""" + try: + tokenize.tokenize(ListReader(lines).readline, BlockFinder().tokeneater) +- except EndOfBlock, eob: ++ except EndOfBlock as eob: + return lines[:eob.args[0]] + # Fooling the indent/dedent logic implies a one-line definition + return lines[:1] +@@ -530,7 +531,7 @@ def walktree(classes, children, parent): + classes.sort(lambda a, b: cmp(a.__name__, b.__name__)) + for c in classes: + results.append((c, c.__bases__)) +- if children.has_key(c): ++ if c in children: + results.append(walktree(children[c], children, c)) + 
return results + +@@ -548,13 +549,13 @@ def getclasstree(classes, unique=0): + for c in classes: + if c.__bases__: + for parent in c.__bases__: +- if not children.has_key(parent): ++ if parent not in children: + children[parent] = [] + children[parent].append(c) + if unique and parent in classes: break + elif c not in roots: + roots.append(c) +- for parent in children.keys(): ++ for parent in list(children.keys()): + if parent not in classes: + roots.append(parent) + return walktree(roots, children, None) +@@ -569,7 +570,7 @@ def getargs(co): + Three things are returned: (args, varargs, varkw), where 'args' is + a list of argument names (possibly containing nested lists), and + 'varargs' and 'varkw' are the names of the * and ** arguments or None.""" +- if not iscode(co): raise TypeError, 'arg is not a code object' ++ if not iscode(co): raise TypeError('arg is not a code object') + + nargs = co.co_argcount + names = co.co_varnames +@@ -622,10 +623,10 @@ def getargspec(func): + 'varargs' and 'varkw' are the names of the * and ** arguments or None. + 'defaults' is an n-tuple of the default values of the last n arguments.""" + if ismethod(func): +- func = func.im_func +- if not isfunction(func): raise TypeError, 'arg is not a Python function' +- args, varargs, varkw = getargs(func.func_code) +- return args, varargs, varkw, func.func_defaults ++ func = func.__func__ ++ if not isfunction(func): raise TypeError('arg is not a Python function') ++ args, varargs, varkw = getargs(func.__code__) ++ return args, varargs, varkw, func.__defaults__ + + def getargvalues(frame): + """Get information about arguments passed into a particular frame. 
+@@ -645,8 +646,8 @@ def joinseq(seq): + + def strseq(object, convert, join=joinseq): + """Recursively walk a sequence, stringifying each element.""" +- if type(object) in [types.ListType, types.TupleType]: +- return join(map(lambda o, c=convert, j=join: strseq(o, c, j), object)) ++ if type(object) in [list, tuple]: ++ return join(list(map(lambda o, c=convert, j=join: strseq(o, c, j), object))) + else: + return convert(object) + +@@ -785,4 +786,4 @@ def stack(context=1): + + def trace(context=1): + """Return a list of records for the stack below the current exception.""" +- return getinnerframes(sys.exc_traceback, context) #@UndefinedVariable ++ return getinnerframes(sys.exc_info()[2], context) #@UndefinedVariable +--- _pydev_imps/_pydev_pkgutil_old.py.orig 2016-04-26 16:50:14 UTC ++++ _pydev_imps/_pydev_pkgutil_old.py +@@ -140,7 +140,7 @@ def iter_modules(path=None, prefix=''): + if path is None: + importers = iter_importers() + else: +- importers = map(get_importer, path) ++ importers = list(map(get_importer, path)) + + yielded = {} + for i in importers: +@@ -329,7 +329,7 @@ try: + from zipimport import zipimporter + + def iter_zipimport_modules(importer, prefix=''): +- dirlist = zipimport._zip_directory_cache[importer.archive].keys() ++ dirlist = list(zipimport._zip_directory_cache[importer.archive].keys()) + dirlist.sort() + _prefix = importer.prefix + plen = len(_prefix) +@@ -526,7 +526,7 @@ def extend_path(path, name): + path = path[:] # Start with a copy of the existing path + + for dir in sys.path: +- if not isinstance(dir, basestring) or not os.path.isdir(dir): ++ if not isinstance(dir, str) or not os.path.isdir(dir): + continue + subdir = os.path.join(dir, pname) + # XXX This may still add duplicate entries to path on +@@ -540,7 +540,7 @@ def extend_path(path, name): + if os.path.isfile(pkgfile): + try: + f = open(pkgfile) +- except IOError, msg: ++ except IOError as msg: + sys.stderr.write("Can't open %s: %s\n" % + (pkgfile, msg)) + else: +--- 
_pydev_imps/_pydev_uuid_old.py.orig 2017-02-06 17:09:10 UTC ++++ _pydev_imps/_pydev_uuid_old.py +@@ -132,7 +132,7 @@ class UUID(object): + hex = hex.strip('{}').replace('-', '') + if len(hex) != 32: + raise ValueError('badly formed hexadecimal UUID string') +- int = long(hex, 16) ++ int = int(hex, 16) + if bytes_le is not None: + if len(bytes_le) != 16: + raise ValueError('bytes_le is not a 16-char string') +@@ -142,39 +142,39 @@ class UUID(object): + if bytes is not None: + if len(bytes) != 16: + raise ValueError('bytes is not a 16-char string') +- int = long(('%02x'*16) % tuple(map(ord, bytes)), 16) ++ int = int(('%02x'*16) % tuple(map(ord, bytes)), 16) + if fields is not None: + if len(fields) != 6: + raise ValueError('fields is not a 6-tuple') + (time_low, time_mid, time_hi_version, + clock_seq_hi_variant, clock_seq_low, node) = fields +- if not 0 <= time_low < 1<<32L: ++ if not 0 <= time_low < 1<<32: + raise ValueError('field 1 out of range (need a 32-bit value)') +- if not 0 <= time_mid < 1<<16L: ++ if not 0 <= time_mid < 1<<16: + raise ValueError('field 2 out of range (need a 16-bit value)') +- if not 0 <= time_hi_version < 1<<16L: ++ if not 0 <= time_hi_version < 1<<16: + raise ValueError('field 3 out of range (need a 16-bit value)') +- if not 0 <= clock_seq_hi_variant < 1<<8L: ++ if not 0 <= clock_seq_hi_variant < 1<<8: + raise ValueError('field 4 out of range (need an 8-bit value)') +- if not 0 <= clock_seq_low < 1<<8L: ++ if not 0 <= clock_seq_low < 1<<8: + raise ValueError('field 5 out of range (need an 8-bit value)') +- if not 0 <= node < 1<<48L: ++ if not 0 <= node < 1<<48: + raise ValueError('field 6 out of range (need a 48-bit value)') +- clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low +- int = ((time_low << 96L) | (time_mid << 80L) | +- (time_hi_version << 64L) | (clock_seq << 48L) | node) ++ clock_seq = (clock_seq_hi_variant << 8) | clock_seq_low ++ int = ((time_low << 96) | (time_mid << 80) | ++ (time_hi_version << 64) | (clock_seq << 
48) | node) + if int is not None: +- if not 0 <= int < 1<<128L: ++ if not 0 <= int < 1<<128: + raise ValueError('int is out of range (need a 128-bit value)') + if version is not None: + if not 1 <= version <= 5: + raise ValueError('illegal version number') + # Set the variant to RFC 4122. +- int &= ~(0xc000 << 48L) +- int |= 0x8000 << 48L ++ int &= ~(0xc000 << 48) ++ int |= 0x8000 << 48 + # Set the version number. +- int &= ~(0xf000 << 64L) +- int |= version << 76L ++ int &= ~(0xf000 << 64) ++ int |= version << 76 + self.__dict__['int'] = int + + def __cmp__(self, other): +@@ -221,38 +221,38 @@ class UUID(object): + fields = property(get_fields) + + def get_time_low(self): +- return self.int >> 96L ++ return self.int >> 96 + + time_low = property(get_time_low) + + def get_time_mid(self): +- return (self.int >> 80L) & 0xffff ++ return (self.int >> 80) & 0xffff + + time_mid = property(get_time_mid) + + def get_time_hi_version(self): +- return (self.int >> 64L) & 0xffff ++ return (self.int >> 64) & 0xffff + + time_hi_version = property(get_time_hi_version) + + def get_clock_seq_hi_variant(self): +- return (self.int >> 56L) & 0xff ++ return (self.int >> 56) & 0xff + + clock_seq_hi_variant = property(get_clock_seq_hi_variant) + + def get_clock_seq_low(self): +- return (self.int >> 48L) & 0xff ++ return (self.int >> 48) & 0xff + + clock_seq_low = property(get_clock_seq_low) + + def get_time(self): +- return (((self.time_hi_version & 0x0fffL) << 48L) | +- (self.time_mid << 32L) | self.time_low) ++ return (((self.time_hi_version & 0x0fff) << 48) | ++ (self.time_mid << 32) | self.time_low) + + time = property(get_time) + + def get_clock_seq(self): +- return (((self.clock_seq_hi_variant & 0x3fL) << 8L) | ++ return (((self.clock_seq_hi_variant & 0x3f) << 8) | + self.clock_seq_low) + + clock_seq = property(get_clock_seq) +@@ -273,11 +273,11 @@ class UUID(object): + urn = property(get_urn) + + def get_variant(self): +- if not self.int & (0x8000 << 48L): ++ if not self.int & 
(0x8000 << 48): + return RESERVED_NCS +- elif not self.int & (0x4000 << 48L): ++ elif not self.int & (0x4000 << 48): + return RFC_4122 +- elif not self.int & (0x2000 << 48L): ++ elif not self.int & (0x2000 << 48): + return RESERVED_MICROSOFT + else: + return RESERVED_FUTURE +@@ -287,7 +287,7 @@ class UUID(object): + def get_version(self): + # The version bits are only meaningful for RFC 4122 UUIDs. + if self.variant == RFC_4122: +- return int((self.int >> 76L) & 0xf) ++ return int((self.int >> 76) & 0xf) + + version = property(get_version) + +@@ -383,9 +383,9 @@ def _netbios_getnode(): + if win32wnet.Netbios(ncb) != 0: + continue + status._unpack() +- bytes = map(ord, status.adapter_address) +- return ((bytes[0]<<40L) + (bytes[1]<<32L) + (bytes[2]<<24L) + +- (bytes[3]<<16L) + (bytes[4]<<8L) + bytes[5]) ++ bytes = list(map(ord, status.adapter_address)) ++ return ((bytes[0]<<40) + (bytes[1]<<32) + (bytes[2]<<24) + ++ (bytes[3]<<16) + (bytes[4]<<8) + bytes[5]) + + # Thanks to Thomas Heller for ctypes and for his help with its use here. + +@@ -437,7 +437,7 @@ def _windll_getnode(): + def _random_getnode(): + """Get a random node ID, with eighth bit set as suggested by RFC 4122.""" + import random +- return random.randrange(0, 1<<48L) | 0x010000000000L ++ return random.randrange(0, 1<<48) | 0x010000000000 + + _node = None + +@@ -487,18 +487,18 @@ def uuid1(node=None, clock_seq=None): + nanoseconds = int(time.time() * 1e9) + # 0x01b21dd213814000 is the number of 100-ns intervals between the + # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00. 
+- timestamp = int(nanoseconds/100) + 0x01b21dd213814000L ++ timestamp = int(nanoseconds/100) + 0x01b21dd213814000 + if timestamp <= _last_timestamp: + timestamp = _last_timestamp + 1 + _last_timestamp = timestamp + if clock_seq is None: + import random +- clock_seq = random.randrange(1<<14L) # instead of stable storage +- time_low = timestamp & 0xffffffffL +- time_mid = (timestamp >> 32L) & 0xffffL +- time_hi_version = (timestamp >> 48L) & 0x0fffL +- clock_seq_low = clock_seq & 0xffL +- clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL ++ clock_seq = random.randrange(1<<14) # instead of stable storage ++ time_low = timestamp & 0xffffffff ++ time_mid = (timestamp >> 32) & 0xffff ++ time_hi_version = (timestamp >> 48) & 0x0fff ++ clock_seq_low = clock_seq & 0xff ++ clock_seq_hi_variant = (clock_seq >> 8) & 0x3f + if node is None: + node = getnode() + return UUID(fields=(time_low, time_mid, time_hi_version, +--- _pydev_imps/_pydev_xmlrpclib.py.orig 2016-04-26 16:50:14 UTC ++++ _pydev_imps/_pydev_xmlrpclib.py +@@ -146,9 +146,9 @@ from types import * + # Internal stuff + + try: +- unicode ++ str + except NameError: +- unicode = None # unicode support not available ++ str = None # unicode support not available + + try: + import datetime +@@ -162,8 +162,8 @@ except (NameError, AttributeError): + + def _decode(data, encoding, is8bit=re.compile("[\x80-\xff]").search): + # decode non-ascii string (if possible) +- if unicode and encoding and is8bit(data): +- data = unicode(data, encoding) ++ if str and encoding and is8bit(data): ++ data = str(data, encoding) + return data + + def escape(s, replace=string.replace): +@@ -171,7 +171,7 @@ def escape(s, replace=string.replace): + s = replace(s, "<", "<") + return replace(s, ">", ">",) + +-if unicode: ++if str: + def _stringify(string): + # convert to 7-bit ascii if possible + try: +@@ -186,11 +186,11 @@ __version__ = "1.0.1" + + # xmlrpc integer limits + try: +- long ++ int + except NameError: + long = int +-MAXINT = long(2) ** 31 
- 1 +-MININT = long(-2) ** 31 ++MAXINT = int(2) ** 31 - 1 ++MININT = int(-2) ** 31 + + # -------------------------------------------------------------------- + # Error constants (from Dan Libby's specification at +@@ -291,7 +291,6 @@ class Fault(Error): + if _bool_is_builtin: + boolean = Boolean = bool #@UndefinedVariable + # to avoid breaking code which references xmlrpclib.{True,False} +- True, False = True, False + else: + class Boolean: + """Boolean-value wrapper. +@@ -319,11 +318,9 @@ else: + def __int__(self): + return self.value + +- def __nonzero__(self): ++ def __bool__(self): + return self.value + +- True, False = Boolean(1), Boolean(0) +- + ## + # Map true or false value to XML-RPC boolean values. + # +@@ -420,9 +417,9 @@ def _datetime_type(data): + + import base64 + try: +- import cStringIO as StringIO ++ import io as StringIO + except ImportError: +- import StringIO ++ import io + + class Binary: + """Wrapper for binary data.""" +@@ -448,7 +445,7 @@ class Binary: + + def encode(self, out): + out.write("\n") +- base64.encode(StringIO.StringIO(self.data), out) ++ base64.encode(io.StringIO(self.data), out) + out.write("\n") + + def _binary(data): +@@ -682,7 +679,7 @@ class Marshaller: + write("\n") + dispatch[StringType] = dump_string + +- if unicode: ++ if str: + def dump_unicode(self, value, write, escape=escape): + value = value.encode(self.encoding) + write("") +@@ -692,7 +689,7 @@ class Marshaller: + + def dump_array(self, value, write): + i = id(value) +- if self.memo.has_key(i): ++ if i in self.memo: + raise TypeError("cannot marshal recursive sequences") + self.memo[i] = None + dump = self.__dump +@@ -706,15 +703,15 @@ class Marshaller: + + def dump_struct(self, value, write, escape=escape): + i = id(value) +- if self.memo.has_key(i): ++ if i in self.memo: + raise TypeError("cannot marshal recursive dictionaries") + self.memo[i] = None + dump = self.__dump + write("\n") +- for k, v in value.items(): ++ for k, v in list(value.items()): + 
write("\n") + if type(k) is not StringType: +- if unicode and type(k) is UnicodeType: ++ if str and type(k) is UnicodeType: + k = k.encode(self.encoding) + else: + raise TypeError("dictionary key must be string") +@@ -1230,12 +1227,12 @@ class Transport: + if isinstance(host, TupleType): + host, x509 = host + +- import urllib +- auth, host = urllib.splituser(host) ++ import urllib.request, urllib.parse, urllib.error ++ auth, host = urllib.parse.splituser(host) + + if auth: + import base64 +- auth = base64.encodestring(urllib.unquote(auth)) ++ auth = base64.encodestring(urllib.parse.unquote(auth)) + auth = string.join(string.split(auth), "") # get rid of whitespace + extra_headers = [ + ("Authorization", "Basic " + auth) +@@ -1253,9 +1250,9 @@ class Transport: + + def make_connection(self, host): + # create a HTTP connection object from a host descriptor +- import httplib ++ import http.client + host, extra_headers, x509 = self.get_host_info(host) +- return httplib.HTTP(host) ++ return http.client.HTTP(host) + + ## + # Send request header. 
+@@ -1278,7 +1275,7 @@ class Transport: + connection.putheader("Host", host) + if extra_headers: + if isinstance(extra_headers, DictType): +- extra_headers = extra_headers.items() ++ extra_headers = list(extra_headers.items()) + for key, value in extra_headers: + connection.putheader(key, value) + +@@ -1355,10 +1352,10 @@ class SafeTransport(Transport): + def make_connection(self, host): + # create a HTTPS connection object from a host descriptor + # host may be a string, or a (host, x509-dict) tuple +- import httplib ++ import http.client + host, extra_headers, x509 = self.get_host_info(host) + try: +- HTTPS = httplib.HTTPS ++ HTTPS = http.client.HTTPS + except AttributeError: + raise NotImplementedError( + "your version of httplib doesn't support HTTPS" +@@ -1410,11 +1407,11 @@ class ServerProxy: + # establish a "logical" server connection + + # get the url +- import urllib +- type, uri = urllib.splittype(uri) ++ import urllib.request, urllib.parse, urllib.error ++ type, uri = urllib.parse.splittype(uri) + if type not in ("http", "https"): + raise IOError("unsupported XML-RPC protocol") +- self.__host, self.__handler = urllib.splithost(uri) ++ self.__host, self.__handler = urllib.parse.splithost(uri) + if not self.__handler: + self.__handler = "/RPC2" + +--- _pydevd_bundle/pydevconsole_code_for_ironpython.py.orig 2017-03-15 13:32:25 UTC ++++ _pydevd_bundle/pydevconsole_code_for_ironpython.py +@@ -102,23 +102,23 @@ def _maybe_compile(compiler, source, filename, symbol) + + try: + code = compiler(source, filename, symbol) +- except SyntaxError, err: ++ except SyntaxError as err: + pass + + try: + code1 = compiler(source + "\n", filename, symbol) +- except SyntaxError, err1: ++ except SyntaxError as err1: + pass + + try: + code2 = compiler(source + "\n\n", filename, symbol) +- except SyntaxError, err2: ++ except SyntaxError as err2: + pass + + if code: + return code + if not code1 and repr(err1) == repr(err2): +- raise SyntaxError, err1 ++ raise SyntaxError(err1) + 
+ def _compile(source, filename, symbol): + return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT) +@@ -302,7 +302,7 @@ class InteractiveInterpreter: + + """ + try: +- exec code in self.locals ++ exec(code, self.locals) + except SystemExit: + raise + except: +@@ -338,7 +338,7 @@ class InteractiveInterpreter: + value = SyntaxError(msg, (filename, lineno, offset, line)) + sys.last_value = value + list = traceback.format_exception_only(type, value) +- map(self.write, list) ++ list(map(self.write, list)) + + def showtraceback(self): + """Display the exception that just occurred. +@@ -361,7 +361,7 @@ class InteractiveInterpreter: + list[len(list):] = traceback.format_exception_only(type, value) + finally: + tblist = tb = None +- map(self.write, list) ++ list(map(self.write, list)) + + def write(self, data): + """Write a string. +@@ -436,7 +436,7 @@ class InteractiveConsole(InteractiveInterpreter): + line = self.raw_input(prompt) + # Can be None if sys.stdin was redefined + encoding = getattr(sys.stdin, "encoding", None) +- if encoding and not isinstance(line, unicode): ++ if encoding and not isinstance(line, str): + line = line.decode(encoding) + except EOFError: + self.write("\n") +@@ -480,7 +480,7 @@ class InteractiveConsole(InteractiveInterpreter): + implementation. 
+ + """ +- return raw_input(prompt) ++ return input(prompt) + + + def interact(banner=None, readfunc=None, local=None): +--- _pydevd_bundle/pydevd_exec.py.orig 2017-02-06 17:09:10 UTC ++++ _pydevd_bundle/pydevd_exec.py +@@ -1,5 +1,5 @@ + def Exec(exp, global_vars, local_vars=None): + if local_vars is not None: +- exec exp in global_vars, local_vars ++ exec(exp, global_vars, local_vars) + else: +- exec exp in global_vars +\ No newline at end of file ++ exec(exp, global_vars) +--- pydev_ipython/inputhookglut.py.orig 2017-02-06 17:09:10 UTC ++++ pydev_ipython/inputhookglut.py +@@ -95,7 +95,7 @@ def glut_close(): + def glut_int_handler(signum, frame): + # Catch sigint and print the defautl message + signal.signal(signal.SIGINT, signal.default_int_handler) +- print '\nKeyboardInterrupt' ++ print('\nKeyboardInterrupt') + # Need to reprint the prompt at this stage + + diff --git a/devel/py-pygpx/files/patch-2to3 b/devel/py-pygpx/files/patch-2to3 new file mode 100644 index 00000000000..a50a30ef4de --- /dev/null +++ b/devel/py-pygpx/files/patch-2to3 @@ -0,0 +1,44 @@ +--- pygpx.py.orig 2006-07-30 12:33:23 UTC ++++ pygpx.py +@@ -44,7 +44,7 @@ class GPXTrackPt: + elif node.nodeName == "ele": + self.elevation = float(node.firstChild.data) + else: +- raise ValueError, "Can't handle node", node.nodeName ++ raise ValueError("Can't handle node").with_traceback(node.nodeName) + + def distance(self, other): + """Compute the distance from this point to another.""" +@@ -72,7 +72,7 @@ class GPXTrackSeg: + elif node.nodeName == "trkpt": + self.trkpts.append(GPXTrackPt(node, self.version)) + else: +- raise ValueError, "Can't handle node <%s>" % node.nodeName ++ raise ValueError("Can't handle node <%s>" % node.nodeName) + + def distance(self): + """Return the distance along the track segment.""" +@@ -105,7 +105,7 @@ class GPXTrack: + elif node.nodeName == "number": + self.name = node.firstChild.data + else: +- raise ValueError, "Can't handle node <%s>" % node.nodeName ++ raise 
ValueError("Can't handle node <%s>" % node.nodeName) + + def distance(self): + """Return the distance for this track.""" +@@ -155,7 +155,7 @@ class GPX: + if self.version == "1.0": + self._init_version_1_0() + else: +- raise ValueError, "Can't handle version", self.version ++ raise ValueError("Can't handle version").with_traceback(self.version) + + def _init_version_1_0(self): + """Initialise a version 1.0 GPX instance.""" +@@ -168,4 +168,4 @@ class GPX: + elif node.nodeName == "trk": + self.tracks.append(GPXTrack(node, self.version)) + else: +- raise ValueError, "Can't handle node", node.nodeName ++ raise ValueError("Can't handle node").with_traceback(node.nodeName) diff --git a/devel/py-pyrepl/files/patch-2to3 b/devel/py-pyrepl/files/patch-2to3 new file mode 100644 index 00000000000..afdeccca2b3 --- /dev/null +++ b/devel/py-pyrepl/files/patch-2to3 @@ -0,0 +1,703 @@ +--- pyrepl/cmdrepl.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/cmdrepl.py +@@ -33,7 +33,7 @@ It was designed to let you do this: + which is in fact done by the `pythoni' script that comes with + pyrepl.""" + +-from __future__ import print_function ++ + + from pyrepl import completer + from pyrepl.completing_reader import CompletingReader as CR +--- pyrepl/completer.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/completer.py +@@ -18,7 +18,7 @@ + # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ + try: +- import __builtin__ as builtins ++ import builtins as builtins + builtins # silence broken pyflakes + except ImportError: + import builtins +@@ -44,8 +44,8 @@ class Completer(object): + import keyword + matches = [] + for list in [keyword.kwlist, +- builtins.__dict__.keys(), +- self.ns.keys()]: ++ list(builtins.__dict__.keys()), ++ list(self.ns.keys())]: + for word in list: + if word.startswith(text) and word != "__builtins__": + matches.append(word) +--- pyrepl/completing_reader.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/completing_reader.py +@@ -64,7 +64,7 @@ def build_menu(cons, wordlist, start, use_brackets, so + else: + item = "%s " + padding = 2 +- maxlen = min(max(map(real_len, wordlist)), cons.width - padding) ++ maxlen = min(max(list(map(real_len, wordlist))), cons.width - padding) + cols = cons.width / (maxlen + padding) + rows = (len(wordlist) - 1)/cols + 1 + +--- pyrepl/historical_reader.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/historical_reader.py +@@ -23,7 +23,7 @@ from pyrepl.reader import Reader as R + isearch_keymap = tuple( + [('\\%03o'%c, 'isearch-end') for c in range(256) if chr(c) != '\\'] + \ + [(c, 'isearch-add-character') +- for c in map(chr, range(32, 127)) if c != '\\'] + \ ++ for c in map(chr, list(range(32, 127))) if c != '\\'] + \ + [('\\%03o'%c, 'isearch-add-character') + for c in range(256) if chr(c).isalpha() and chr(c) != '\\'] + \ + [('\\\\', 'self-insert'), +@@ -292,7 +292,7 @@ class HistoricalReader(R): + def finish(self): + super(HistoricalReader, self).finish() + ret = self.get_unicode() +- for i, t in self.transient_history.items(): ++ for i, t in list(self.transient_history.items()): + if i < len(self.history) and i != self.historyi: + self.history[i] = t + if ret: +--- pyrepl/input.py.orig 2019-04-16 13:00:52 UTC ++++ pyrepl/input.py +@@ -32,11 +32,11 @@ + # executive, temporary decision: [tab] and [C-i] are distinct, but + # [meta-key] is identified with [esc key]. 
We demand that any console + # class does quite a lot towards emulating a unix terminal. +-from __future__ import print_function ++ + import unicodedata + from collections import deque + import pprint +-from trace import trace ++from .trace import trace + + + class InputTranslator(object): +--- pyrepl/keymap.py.orig 2019-04-16 13:00:52 UTC ++++ pyrepl/keymap.py +@@ -174,17 +174,17 @@ def parse_keys(key): + + def compile_keymap(keymap, empty=b''): + r = {} +- for key, value in keymap.items(): ++ for key, value in list(keymap.items()): + if isinstance(key, bytes): + first = key[:1] + else: + first = key[0] + r.setdefault(first, {})[key[1:]] = value +- for key, value in r.items(): ++ for key, value in list(r.items()): + if empty in value: + if len(value) != 1: + raise KeySpecError( +- "key definitions for %s clash"%(value.values(),)) ++ "key definitions for %s clash"%(list(value.values()),)) + else: + r[key] = value[empty] + else: +--- pyrepl/keymaps.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/keymaps.py +@@ -62,9 +62,9 @@ reader_emacs_keymap = tuple( + (r'\M-\n', 'self-insert'), + (r'\', 'self-insert')] + \ + [(c, 'self-insert') +- for c in map(chr, range(32, 127)) if c <> '\\'] + \ ++ for c in map(chr, list(range(32, 127))) if c != '\\'] + \ + [(c, 'self-insert') +- for c in map(chr, range(128, 256)) if c.isalpha()] + \ ++ for c in map(chr, list(range(128, 256))) if c.isalpha()] + \ + [(r'\', 'up'), + (r'\', 'down'), + (r'\', 'left'), +@@ -101,9 +101,9 @@ python_emacs_keymap = comp_emacs_keymap + ( + + reader_vi_insert_keymap = tuple( + [(c, 'self-insert') +- for c in map(chr, range(32, 127)) if c <> '\\'] + \ ++ for c in map(chr, list(range(32, 127))) if c != '\\'] + \ + [(c, 'self-insert') +- for c in map(chr, range(128, 256)) if c.isalpha()] + \ ++ for c in map(chr, list(range(128, 256))) if c.isalpha()] + \ + [(r'\C-d', 'delete'), + (r'\', 'backspace'), + ('')]) +--- pyrepl/pygame_console.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/pygame_console.py +@@ -72,7 
+72,7 @@ class FakeStdin: + # argh! + raise NotImplementedError + def readline(self, n=None): +- from reader import Reader ++ from .reader import Reader + try: + # this isn't quite right: it will clobber any prompt that's + # been printed. Not sure how to get around this... +@@ -130,7 +130,8 @@ class PyGameConsole(Console): + s.fill(c, [0, 600 - bmargin, 800, bmargin]) + s.fill(c, [800 - rmargin, 0, lmargin, 600]) + +- def refresh(self, screen, (cx, cy)): ++ def refresh(self, screen, xxx_todo_changeme): ++ (cx, cy) = xxx_todo_changeme + self.screen = screen + self.pygame_screen.fill(colors.bg, + [0, tmargin + self.cur_top + self.scroll, +@@ -211,12 +212,12 @@ class PyGameConsole(Console): + meta = bool(pyg_event.mod & (KMOD_ALT|KMOD_META)) + + try: +- return self.k[(pyg_event.unicode, meta, ctrl)], pyg_event.unicode ++ return self.k[(pyg_event.str, meta, ctrl)], pyg_event.str + except KeyError: + try: +- return self.k[(pyg_event.key, meta, ctrl)], pyg_event.unicode ++ return self.k[(pyg_event.key, meta, ctrl)], pyg_event.str + except KeyError: +- return "invalid-key", pyg_event.unicode ++ return "invalid-key", pyg_event.str + + def get_event(self, block=1): + """Return an Event instance. Returns None if |block| is false +@@ -239,7 +240,7 @@ class PyGameConsole(Console): + self.cmd_buf += c.encode('ascii', 'replace') + self.k = k + +- if not isinstance(k, types.DictType): ++ if not isinstance(k, dict): + e = Event(k, self.cmd_buf, []) + self.k = self.keymap + self.cmd_buf = '' +@@ -282,7 +283,7 @@ class PyGameConsole(Console): + + def forgetinput(self): + """Forget all pending, but not yet processed input.""" +- while pygame.event.poll().type <> NOEVENT: ++ while pygame.event.poll().type != NOEVENT: + pass + + def getpending(self): +@@ -299,7 +300,7 @@ class PyGameConsole(Console): + + def wait(self): + """Wait for an event.""" +- raise Exception, "erp!" ++ raise Exception("erp!") + + def repaint(self): + # perhaps we should consolidate grobs? 
+--- pyrepl/pygame_keymap.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/pygame_keymap.py +@@ -85,27 +85,25 @@ def _parse_key1(key, s): + while not ret and s < len(key): + if key[s] == '\\': + c = key[s+1].lower() +- if _escapes.has_key(c): ++ if c in _escapes: + ret = _escapes[c] + s += 2 + elif c == "c": + if key[s + 2] != '-': +- raise KeySpecError, \ +- "\\C must be followed by `-' (char %d of %s)"%( +- s + 2, repr(key)) ++ raise KeySpecError("\\C must be followed by `-' (char %d of %s)"%( ++ s + 2, repr(key))) + if ctrl: +- raise KeySpecError, "doubled \\C- (char %d of %s)"%( +- s + 1, repr(key)) ++ raise KeySpecError("doubled \\C- (char %d of %s)"%( ++ s + 1, repr(key))) + ctrl = 1 + s += 3 + elif c == "m": + if key[s + 2] != '-': +- raise KeySpecError, \ +- "\\M must be followed by `-' (char %d of %s)"%( +- s + 2, repr(key)) ++ raise KeySpecError("\\M must be followed by `-' (char %d of %s)"%( ++ s + 2, repr(key))) + if meta: +- raise KeySpecError, "doubled \\M- (char %d of %s)"%( +- s + 1, repr(key)) ++ raise KeySpecError("doubled \\M- (char %d of %s)"%( ++ s + 1, repr(key))) + meta = 1 + s += 3 + elif c.isdigit(): +@@ -119,28 +117,25 @@ def _parse_key1(key, s): + elif c == '<': + t = key.find('>', s) + if t == -1: +- raise KeySpecError, \ +- "unterminated \\< starting at char %d of %s"%( +- s + 1, repr(key)) ++ raise KeySpecError("unterminated \\< starting at char %d of %s"%( ++ s + 1, repr(key))) + try: + ret = _keynames[key[s+2:t].lower()] + s = t + 1 + except KeyError: +- raise KeySpecError, \ +- "unrecognised keyname `%s' at char %d of %s"%( +- key[s+2:t], s + 2, repr(key)) ++ raise KeySpecError("unrecognised keyname `%s' at char %d of %s"%( ++ key[s+2:t], s + 2, repr(key))) + if ret is None: + return None, s + else: +- raise KeySpecError, \ +- "unknown backslash escape %s at char %d of %s"%( +- `c`, s + 2, repr(key)) ++ raise KeySpecError("unknown backslash escape %s at char %d of %s"%( ++ repr(c), s + 2, repr(key))) + else: + if ctrl: + ret = 
chr(ord(key[s]) & 0x1f) # curses.ascii.ctrl() +- ret = unicode(ret) ++ ret = str(ret) + else: +- ret = unicode(key[s]) ++ ret = str(key[s]) + s += 1 + return (ret, meta, ctrl), s + +@@ -156,13 +151,12 @@ def parse_keys(key): + + def _compile_keymap(keymap): + r = {} +- for key, value in keymap.items(): ++ for key, value in list(keymap.items()): + r.setdefault(key[0], {})[key[1:]] = value +- for key, value in r.items(): +- if value.has_key(()): +- if len(value) <> 1: +- raise KeySpecError, \ +- "key definitions for %s clash"%(value.values(),) ++ for key, value in list(r.items()): ++ if () in value: ++ if len(value) != 1: ++ raise KeySpecError("key definitions for %s clash"%(list(value.values()),)) + else: + r[key] = value[()] + else: +@@ -173,7 +167,7 @@ def compile_keymap(keymap): + r = {} + for key, value in keymap: + k = parse_keys(key) +- if value is None and r.has_key(k): ++ if value is None and k in r: + del r[k] + if k is not None: + r[k] = value +@@ -182,7 +176,7 @@ def compile_keymap(keymap): + def keyname(key): + longest_match = '' + longest_match_name = '' +- for name, keyseq in keyset.items(): ++ for name, keyseq in list(keyset.items()): + if keyseq and key.startswith(keyseq) and \ + len(keyseq) > len(longest_match): + longest_match = keyseq +@@ -202,7 +196,7 @@ def unparse_key(keyseq): + return '' + name, s = keyname(keyseq) + if name: +- if name <> 'escape' or s == len(keyseq): ++ if name != 'escape' or s == len(keyseq): + return '\\<' + name + '>' + unparse_key(keyseq[s:]) + else: + return '\\M-' + unparse_key(keyseq[1:]) +@@ -211,7 +205,7 @@ def unparse_key(keyseq): + r = keyseq[1:] + if c == '\\': + p = '\\\\' +- elif _unescapes.has_key(c): ++ elif c in _unescapes: + p = _unescapes[c] + elif ord(c) < ord(' '): + p = '\\C-%s'%(chr(ord(c)+96),) +@@ -226,7 +220,7 @@ def _unparse_keyf(keyseq): + return [] + name, s = keyname(keyseq) + if name: +- if name <> 'escape' or s == len(keyseq): ++ if name != 'escape' or s == len(keyseq): + return [name] + 
_unparse_keyf(keyseq[s:]) + else: + rest = _unparse_keyf(keyseq[1:]) +@@ -236,7 +230,7 @@ def _unparse_keyf(keyseq): + r = keyseq[1:] + if c == '\\': + p = '\\' +- elif _unescapes.has_key(c): ++ elif c in _unescapes: + p = _unescapes[c] + elif ord(c) < ord(' '): + p = 'C-%s'%(chr(ord(c)+96),) +--- pyrepl/python_reader.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/python_reader.py +@@ -20,8 +20,8 @@ + # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + # one impressive collections of imports: +-from __future__ import print_function +-from __future__ import unicode_literals ++ ++ + from pyrepl.completing_reader import CompletingReader + from pyrepl.historical_reader import HistoricalReader + from pyrepl import completing_reader, reader +@@ -31,9 +31,9 @@ import imp, sys, os, re, code, traceback + import atexit, warnings + + try: +- unicode ++ str + except: +- unicode = str ++ str = str + + try: + imp.find_module("twisted") +@@ -179,7 +179,7 @@ class ReaderConsole(code.InteractiveInterpreter): + else: + return + try: +- execfile(initfile, self.locals, self.locals) ++ exec(compile(open(initfile, "rb").read(), initfile, 'exec'), self.locals, self.locals) + except: + etype, value, tb = sys.exc_info() + traceback.print_exception(etype, value, tb.tb_next) +@@ -203,7 +203,7 @@ class ReaderConsole(code.InteractiveInterpreter): + # can't have warnings spewed onto terminal + sv = warnings.showwarning + warnings.showwarning = eat_it +- l = unicode(self.reader.readline(), 'utf-8') ++ l = str(self.reader.readline(), 'utf-8') + finally: + warnings.showwarning = sv + except KeyboardInterrupt: +@@ -301,7 +301,7 @@ class ReaderConsole(code.InteractiveInterpreter): + self.prepare() + try: + while 1: +- if sys.modules.has_key("_tkinter"): ++ if "_tkinter" in sys.modules: + self.really_tkinteract() + # really_tkinteract is not expected to + # return except via an exception, but: +--- pyrepl/reader.py.orig 2019-04-16 13:00:52 UTC ++++ pyrepl/reader.py +@@ -19,32 +19,32 @@ + # 
CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-from __future__ import unicode_literals ++ + import unicodedata + from pyrepl import commands + from pyrepl import input + try: +- unicode ++ str + except NameError: +- unicode = str +- unichr = chr +- basestring = bytes, str ++ str = str ++ chr = chr ++ str = bytes, str + + + def _make_unctrl_map(): + uc_map = {} +- for c in map(unichr, range(256)): ++ for c in map(chr, list(range(256))): + if unicodedata.category(c)[0] != 'C': + uc_map[c] = c + for i in range(32): +- c = unichr(i) +- uc_map[c] = '^' + unichr(ord('A') + i - 1) ++ c = chr(i) ++ uc_map[c] = '^' + chr(ord('A') + i - 1) + uc_map[b'\t'] = ' ' # display TABs as 4 characters +- uc_map[b'\177'] = unicode('^?') ++ uc_map[b'\177'] = str('^?') + for i in range(256): +- c = unichr(i) ++ c = chr(i) + if c not in uc_map: +- uc_map[c] = unicode('\\%03o') % i ++ uc_map[c] = str('\\%03o') % i + return uc_map + + +@@ -87,17 +87,17 @@ del _make_unctrl_map + + [SYNTAX_WHITESPACE, + SYNTAX_WORD, +- SYNTAX_SYMBOL] = range(3) ++ SYNTAX_SYMBOL] = list(range(3)) + + + def make_default_syntax_table(): + # XXX perhaps should use some unicodedata here? 
+ st = {} +- for c in map(unichr, range(256)): ++ for c in map(chr, list(range(256))): + st[c] = SYNTAX_SYMBOL +- for c in [a for a in map(unichr, range(256)) if a.isalpha()]: ++ for c in [a for a in map(chr, list(range(256))) if a.isalpha()]: + st[c] = SYNTAX_WORD +- st[unicode('\n')] = st[unicode(' ')] = SYNTAX_WHITESPACE ++ st[str('\n')] = st[str(' ')] = SYNTAX_WHITESPACE + return st + + default_keymap = tuple( +@@ -145,9 +145,9 @@ default_keymap = tuple( + #(r'\M-\n', 'insert-nl'), + ('\\\\', 'self-insert')] + + [(c, 'self-insert') +- for c in map(chr, range(32, 127)) if c != '\\'] + ++ for c in map(chr, list(range(32, 127))) if c != '\\'] + + [(c, 'self-insert') +- for c in map(chr, range(128, 256)) if c.isalpha()] + ++ for c in map(chr, list(range(128, 256))) if c.isalpha()] + + [(r'\', 'up'), + (r'\', 'down'), + (r'\', 'left'), +@@ -245,7 +245,7 @@ feeling more loquacious than I am now.""" + self.console = console + self.commands = {} + self.msg = '' +- for v in vars(commands).values(): ++ for v in list(vars(commands).values()): + if (isinstance(v, type) and + issubclass(v, commands.Command) and + v.__name__[0].islower()): +@@ -273,7 +273,7 @@ feeling more loquacious than I am now.""" + screeninfo = [] + w = self.console.width - 1 + p = self.pos +- for ln, line in zip(range(len(lines)), lines): ++ for ln, line in zip(list(range(len(lines))), lines): + ll = len(line) + if 0 <= p <= ll: + if self.msg and not self.msg_at_bottom: +@@ -523,7 +523,7 @@ feeling more loquacious than I am now.""" + + def do_cmd(self, cmd): + #print cmd +- if isinstance(cmd[0], basestring): ++ if isinstance(cmd[0], str): + #XXX: unify to text + cmd = self.commands.get(cmd[0], + commands.invalid_command)(self, *cmd) +@@ -619,11 +619,11 @@ feeling more loquacious than I am now.""" + def get_buffer(self, encoding=None): + if encoding is None: + encoding = self.console.encoding +- return unicode('').join(self.buffer).encode(self.console.encoding) ++ return 
str('').join(self.buffer).encode(self.console.encoding) + + def get_unicode(self): + """Return the current buffer as a unicode string.""" +- return unicode('').join(self.buffer) ++ return str('').join(self.buffer) + + + def test(): +--- pyrepl/readline.py.orig 2019-04-16 14:11:33 UTC ++++ pyrepl/readline.py +@@ -248,16 +248,16 @@ class _ReadlineWrapper(object): + self.config.completer_delims = dict.fromkeys(string) + + def get_completer_delims(self): +- chars = self.config.completer_delims.keys() ++ chars = list(self.config.completer_delims.keys()) + chars.sort() + return ''.join(chars) + + def _histline(self, line): + line = line.rstrip('\n') + try: +- return unicode(line, ENCODING) ++ return str(line, ENCODING) + except UnicodeDecodeError: # bah, silently fall back... +- return unicode(line, 'utf-8', 'replace') ++ return str(line, 'utf-8', 'replace') + + def get_history_length(self): + return self.saved_history_length +@@ -293,7 +293,7 @@ class _ReadlineWrapper(object): + history = self.get_reader().get_trimmed_history(maxlength) + f = open(os.path.expanduser(filename), 'w') + for entry in history: +- if isinstance(entry, unicode): ++ if isinstance(entry, str): + try: + entry = entry.encode(ENCODING) + except UnicodeEncodeError: # bah, silently fall back... 
+@@ -340,7 +340,7 @@ class _ReadlineWrapper(object): + def _get_idxs(self): + start = cursor = self.get_reader().pos + buf = self.get_line_buffer() +- for i in xrange(cursor - 1, -1, -1): ++ for i in range(cursor - 1, -1, -1): + if buf[i] in self.get_completer_delims(): + break + start = i +@@ -396,7 +396,7 @@ def _make_stub(_name, _ret): + def stub(*args, **kwds): + import warnings + warnings.warn("readline.%s() not implemented" % _name, stacklevel=2) +- stub.func_name = _name ++ stub.__name__ = _name + globals()[_name] = stub + + for _name, _ret in [ +@@ -438,14 +438,14 @@ def _setup(): + del sys.__raw_input__ + except AttributeError: + pass +- return raw_input(prompt) ++ return input(prompt) + sys.__raw_input__ = _wrapper.raw_input + + else: + # this is not really what readline.c does. Better than nothing I guess +- import __builtin__ +- _old_raw_input = __builtin__.raw_input +- __builtin__.raw_input = _wrapper.raw_input ++ import builtins ++ _old_raw_input = builtins.raw_input ++ builtins.raw_input = _wrapper.raw_input + + _old_raw_input = None + _setup() +--- pyrepl/unix_console.py.orig 2015-12-06 11:35:46 UTC ++++ pyrepl/unix_console.py +@@ -40,9 +40,9 @@ class InvalidTerminal(RuntimeError): + pass + + try: +- unicode ++ str + except NameError: +- unicode = str ++ str = str + + _error = (termios.error, curses.error, InvalidTerminal) + +@@ -221,7 +221,7 @@ class UnixConsole(Console): + + self.__offset = offset + +- for y, oldline, newline, in zip(range(offset, offset + height), ++ for y, oldline, newline, in zip(list(range(offset, offset + height)), + oldscr, + newscr): + if oldline != newline: +@@ -533,7 +533,7 @@ class UnixConsole(Console): + amount = struct.unpack( + "i", ioctl(self.input_fd, FIONREAD, "\0\0\0\0"))[0] + data = os.read(self.input_fd, amount) +- raw = unicode(data, self.encoding, 'replace') ++ raw = str(data, self.encoding, 'replace') + #XXX: something is wrong here + e.data += raw + e.raw += raw +@@ -549,7 +549,7 @@ class 
UnixConsole(Console): + + amount = 10000 + data = os.read(self.input_fd, amount) +- raw = unicode(data, self.encoding, 'replace') ++ raw = str(data, self.encoding, 'replace') + #XXX: something is wrong here + e.data += raw + e.raw += raw +--- pyrepl/unix_eventqueue.py.orig 2019-04-16 13:00:52 UTC ++++ pyrepl/unix_eventqueue.py +@@ -30,9 +30,9 @@ from .trace import trace + from termios import tcgetattr, VERASE + import os + try: +- unicode ++ str + except NameError: +- unicode = str ++ str = str + + + _keynames = { +@@ -74,7 +74,7 @@ CTRL_ARROW_KEYCODE = { + + def general_keycodes(): + keycodes = {} +- for key, tiname in _keynames.items(): ++ for key, tiname in list(_keynames.items()): + keycode = curses.tigetstr(tiname) + trace('key {key} tiname {tiname} keycode {keycode!r}', **locals()) + if keycode: +@@ -87,7 +87,7 @@ def EventQueue(fd, encoding): + keycodes = general_keycodes() + if os.isatty(fd): + backspace = tcgetattr(fd)[6][VERASE] +- keycodes[backspace] = unicode('backspace') ++ keycodes[backspace] = str('backspace') + k = keymap.compile_keymap(keycodes) + trace('keymap {k!r}', k=k) + return EncodedQueue(k, encoding) +@@ -133,7 +133,7 @@ class EncodedQueue(object): + self.insert(Event('key', k, self.flush_buf())) + self.k = self.ck + +- elif self.buf and self.buf[0] == 033: # 033 == escape ++ elif self.buf and self.buf[0] == 0o33: # 033 == escape + # escape sequence not recognized by our keymap: propagate it + # outside so that i can be recognized as an M-... key (see also + # the docstring in keymap.py, in particular the line \\E. +--- testing/infrastructure.py.orig 2015-12-06 11:35:46 UTC ++++ testing/infrastructure.py +@@ -17,7 +17,7 @@ + # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +-from __future__ import print_function ++ + from pyrepl.reader import Reader + from pyrepl.console import Console, Event + +--- testing/test_readline.py.orig 2015-12-06 11:35:46 UTC ++++ testing/test_readline.py +@@ -5,7 +5,7 @@ import sys + + if sys.version_info < (3, ): + bytes_type = str +- unicode_type = unicode ++ unicode_type = str + else: + bytes_type = bytes + unicode_type = str +--- testing/test_unix_reader.py.orig 2019-04-16 13:00:52 UTC ++++ testing/test_unix_reader.py +@@ -1,11 +1,11 @@ +-from __future__ import unicode_literals ++ + from pyrepl.unix_eventqueue import EncodedQueue, Event + + + def test_simple(): + q = EncodedQueue({}, 'utf-8') + +- a = u'\u1234' ++ a = '\u1234' + b = a.encode('utf-8') + for c in b: + q.push(c) diff --git a/devel/py-pytest-flake8/Makefile b/devel/py-pytest-flake8/Makefile index 39c086c3f68..2ec1662741c 100644 --- a/devel/py-pytest-flake8/Makefile +++ b/devel/py-pytest-flake8/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pytest-flake8 -PORTVERSION= 1.0.7 +PORTVERSION= 1.1.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -15,7 +15,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}flake8>=3.5:devel/py-flake8@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pytest>=3.5:devel/py-pytest@${PY_FLAVOR} -USES= python:3.6+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/devel/py-pytest-flake8/distinfo b/devel/py-pytest-flake8/distinfo index d7289d8d95e..015668183df 100644 --- a/devel/py-pytest-flake8/distinfo +++ b/devel/py-pytest-flake8/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1608366150 -SHA256 (pytest-flake8-1.0.7.tar.gz) = f0259761a903563f33d6f099914afef339c085085e643bee8343eb323b32dd6b -SIZE (pytest-flake8-1.0.7.tar.gz) = 9560 +TIMESTAMP = 1647264570 +SHA256 (pytest-flake8-1.1.0.tar.gz) = 358d449ca06b80dbadcb43506cd3e38685d273b4968ac825da871bd4cc436202 +SIZE (pytest-flake8-1.1.0.tar.gz) = 9070 diff --git 
a/devel/py-pytest-subtests/Makefile b/devel/py-pytest-subtests/Makefile new file mode 100644 index 00000000000..12848c3e836 --- /dev/null +++ b/devel/py-pytest-subtests/Makefile @@ -0,0 +1,23 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= pytest-subtests +PORTVERSION= 0.3.2 +CATEGORIES= devel python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= unittest subTest() support and subtests fixture + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE + +BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}setuptools_scm>=0:devel/py-setuptools_scm@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=4.4.0,1:devel/py-pytest@${PY_FLAVOR} + +USES= python:3.5+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +.include diff --git a/devel/py-pytest-subtests/distinfo b/devel/py-pytest-subtests/distinfo new file mode 100644 index 00000000000..40dc10cbcff --- /dev/null +++ b/devel/py-pytest-subtests/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264404 +SHA256 (pytest-subtests-0.3.2.tar.gz) = 677281a196092c06d3da8e6408f0c1362b3f7b180e3c0e9113c7209b6b48afd7 +SIZE (pytest-subtests-0.3.2.tar.gz) = 9694 diff --git a/devel/py-pytest-subtests/pkg-descr b/devel/py-pytest-subtests/pkg-descr new file mode 100644 index 00000000000..b39ec1fcc45 --- /dev/null +++ b/devel/py-pytest-subtests/pkg-descr @@ -0,0 +1,4 @@ +This pytest plugin was generated with Cookiecutter along with @hackebrot's +cookiecutter-pytest-plugin template. 
+ +WWW: https://github.com/pytest-dev/pytest-subtests diff --git a/devel/py-python-application/files/patch-2to3 b/devel/py-python-application/files/patch-2to3 new file mode 100644 index 00000000000..efc63790d6a --- /dev/null +++ b/devel/py-python-application/files/patch-2to3 @@ -0,0 +1,1117 @@ +--- application/configuration/__init__.py.orig 2019-08-02 13:55:47 UTC ++++ application/configuration/__init__.py +@@ -3,7 +3,7 @@ + + import os + +-from ConfigParser import SafeConfigParser, NoSectionError ++from configparser import SafeConfigParser, NoSectionError + from inspect import isclass + from itertools import chain + from types import BuiltinFunctionType +@@ -106,7 +106,7 @@ class SaveState(object): + return self.__state__[item] + + def __iter__(self): +- return self.__state__.iteritems() ++ return iter(self.__state__.items()) + + def __len__(self): + return len(self.__state__) +@@ -143,10 +143,10 @@ class ConfigSectionType(type): + def __new__(mcls, name, bases, dictionary): + settings = {} + # copy all settings defined by parents unless also defined in the class being constructed +- for name, setting in chain(*(cls.__settings__.iteritems() for cls in bases if isinstance(cls, ConfigSectionType))): ++ for name, setting in chain(*(iter(cls.__settings__.items()) for cls in bases if isinstance(cls, ConfigSectionType))): + if name not in dictionary and name not in settings: + settings[name] = ConfigSetting(type=setting.type, value=setting.value) +- for attr, value in dictionary.iteritems(): ++ for attr, value in dictionary.items(): + if isinstance(value, ConfigSetting): + settings[attr] = value + elif attr.startswith('__') or isdescriptor(value) or type(value) is BuiltinFunctionType: +@@ -174,7 +174,7 @@ class ConfigSectionType(type): + return '%s:\n%s' % (cls.__name__, '\n'.join(' %s = %r' % (name, value) for name, value in cls) or ' pass') + + def __iter__(cls): +- return ((name, descriptor.__get__(cls, cls.__class__)) for name, descriptor in 
cls.__settings__.iteritems()) ++ return ((name, descriptor.__get__(cls, cls.__class__)) for name, descriptor in cls.__settings__.items()) + + def __setattr__(cls, name, value): + if name == '__settings__' or name not in cls.__settings__: # need to check for __settings__ as it is set first and the second part of the test depends on it being available +@@ -198,7 +198,7 @@ class ConfigSectionType(type): + config_file = cfgfile + else: + config_file = cls.__cfgtype__(cfgfile) +- if isinstance(section, basestring): ++ if isinstance(section, str): + section_list = (section,) + else: + section_list = section +@@ -214,7 +214,7 @@ class ConfigSectionType(type): + if not set(kw).issubset(cls.__settings__): + raise TypeError('Got unexpected keyword argument %r' % set(kw).difference(cls.__settings__).pop()) + with AtomicUpdate(cls): +- for name, value in kw.iteritems(): ++ for name, value in kw.items(): + setattr(cls, name, value) + + def reset(cls, state=None): +@@ -224,11 +224,11 @@ class ConfigSectionType(type): + raise TypeError('state should be a SaveState instance') + if state.__owner__ is not cls: + raise ValueError('save state does not belong to this config section') +- for name, descriptor in cls.__settings__.iteritems(): ++ for name, descriptor in cls.__settings__.items(): + descriptor.__set__(cls, state[name], convert=False) + + +-class ConfigSection(object): ++class ConfigSection(object, metaclass=ConfigSectionType): + """ + Defines a section in the configuration file + +@@ -245,8 +245,6 @@ class ConfigSection(object): + for reading multiple sections (they will be read in + the order the iterable returns them) + """ +- +- __metaclass__ = ConfigSectionType + + __cfgtype__ = ConfigFile + __cfgfile__ = None +--- application/configuration/datatypes.py.orig 2019-07-30 19:01:31 UTC ++++ application/configuration/datatypes.py +@@ -19,7 +19,7 @@ class Boolean(object): + '0': False, 'no': False, 'false': False, 'off': False} + + def __new__(cls, value): +- if 
isinstance(value, (int, long, float)): ++ if isinstance(value, (int, float)): + return bool(value) + elif not hasattr(value, 'lower'): + raise TypeError('value must be a string, number or boolean') +@@ -33,9 +33,9 @@ class LogLevel(object): + """A log level indicated by a non-negative integer or one of the named attributes of log.level""" + + def __new__(cls, value): +- if isinstance(value, basestring): ++ if isinstance(value, str): + value = value.upper() +- elif not isinstance(value, (int, long)): ++ elif not isinstance(value, int): + raise TypeError('value must be a string or number') + named_levels = {level.name: level for level in log.level.named_levels} + if value in named_levels: +@@ -52,7 +52,7 @@ class StringList(object): + def __new__(cls, value): + if isinstance(value, (tuple, list)): + return [str(x) for x in value] +- elif isinstance(value, basestring): ++ elif isinstance(value, str): + if value.lower() in ('none', ''): + return [] + return re.split(r'\s*,\s*', value) +@@ -77,7 +77,7 @@ class Hostname(str): + """A Hostname or an IP address. 
The keyword `any' stands for '0.0.0.0'""" + + def __new__(cls, value): +- if not isinstance(value, basestring): ++ if not isinstance(value, str): + raise TypeError('value must be a string') + if value.lower() == 'any': + return '0.0.0.0' +@@ -90,7 +90,7 @@ class HostnameList(object): + def __new__(cls, description): + if isinstance(description, (list, tuple)): + return [Hostname(x) for x in description] +- elif not isinstance(description, basestring): ++ elif not isinstance(description, str): + raise TypeError('value must be a string, list or tuple') + if description.lower() == 'none': + return [] +@@ -130,14 +130,14 @@ class NetworkRange(object): + """ + + def __new__(cls, description): +- if isinstance(description, tuple) and len(description) == 2 and all(isinstance(item, (int, long)) and 0 <= item < 2**32 for item in description): ++ if isinstance(description, tuple) and len(description) == 2 and all(isinstance(item, int) and 0 <= item < 2**32 for item in description): + return description +- elif not isinstance(description, basestring): ++ elif not isinstance(description, str): + raise TypeError('value must be a string, or a tuple with 2 32-bit unsigned integers') + if not description or description.lower() == 'none': +- return 0L, 0xFFFFFFFFL ++ return 0, 0xFFFFFFFF + if description.lower() == 'any': +- return 0L, 0L # This is the any address 0.0.0.0 ++ return 0, 0 # This is the any address 0.0.0.0 + match = re.search(r'^(?P
.+?)/(?P\d+)$', description) + if match: + ip_address = match.group('address') +@@ -154,7 +154,7 @@ class NetworkRange(object): + network_address = socket.inet_aton(ip_address) + except Exception: + raise ValueError('invalid IP address: %r' % ip_address) +- network_mask = (0xFFFFFFFFL << 32-mask_bits) & 0xFFFFFFFFL ++ network_mask = (0xFFFFFFFF << 32-mask_bits) & 0xFFFFFFFF + base_address = struct.unpack('!L', network_address)[0] & network_mask + return base_address, network_mask + +@@ -167,7 +167,7 @@ class NetworkRangeList(object): + return description + elif isinstance(description, (list, tuple)): + return [NetworkRange(x) for x in description] or None +- elif not isinstance(description, basestring): ++ elif not isinstance(description, str): + raise TypeError('value must be a string, list, tuple or None') + if description.lower() == 'none': + return None +@@ -206,9 +206,9 @@ class NetworkAddress(object): + def __new__(cls, value): + if value is None: + return value +- elif isinstance(value, tuple) and len(value) == 2 and isinstance(value[1], (int, long)): ++ elif isinstance(value, tuple) and len(value) == 2 and isinstance(value[1], int): + return Hostname(value[0]), value[1] +- elif not isinstance(value, basestring): ++ elif not isinstance(value, str): + raise TypeError('value must be a string, a (host, port) tuple or None') + if value.lower() == 'none': + return None +--- application/debug/memory.py.orig 2019-07-30 18:59:31 UTC ++++ application/debug/memory.py +@@ -67,7 +67,7 @@ class Cycle(tuple): + priority = 2 + elif type(obj).__module__ in ('__builtin__', 'builtins'): + priority = 1 +- elif isinstance(obj, (tuple, list, dict, set, frozenset, str, unicode)): ++ elif isinstance(obj, (tuple, list, dict, set, frozenset, str)): + priority = 3 + else: + priority = 4 +@@ -84,9 +84,9 @@ class Cycle(tuple): + d = cycle.popleft() + try: + if cycle: +- string += ' .%s' % (key for key, value in d.iteritems() if value is cycle[0]).next() ++ string += ' .%s' % next((key 
for key, value in d.items() if value is cycle[0])) + else: +- string += ' .%s' % (key for key, value in d.iteritems() if value is first_obj).next() ++ string += ' .%s' % next((key for key, value in d.items() if value is first_obj)) + except StopIteration: + string += ' .__dict__ -> %s' % repr(d) + string += ' -> ' +@@ -96,7 +96,7 @@ class Cycle(tuple): + + + def memory_dump(show_cycles=True, show_objects=False): +- print '\nGARBAGE:' ++ print('\nGARBAGE:') + gc.collect() + garbage = gc.garbage[:] + +@@ -109,7 +109,7 @@ def memory_dump(show_cycles=True, show_objects=False): + cycles = set() + remaining_nodes = nodes.copy() + while remaining_nodes: +- path = [next(remaining_nodes.itervalues())] ++ path = [next(iter(remaining_nodes.values()))] + while path: + node = path[-1] + remaining_nodes.pop(id(node.object), None) +@@ -123,16 +123,16 @@ def memory_dump(show_cycles=True, show_objects=False): + node.visitable_successors = deque(node.successors) + path.pop(-1) + +- for node in nodes.itervalues(): ++ for node in nodes.values(): + node.successors = node.visitable_successors = None + +- print '\nCOLLECTABLE CYCLES:' ++ print('\nCOLLECTABLE CYCLES:') + for cycle in (c for c in cycles if c.collectable): +- print cycle ++ print(cycle) + +- print '\nUNCOLLECTABLE CYCLES:' ++ print('\nUNCOLLECTABLE CYCLES:') + for cycle in (c for c in cycles if not c.collectable): +- print cycle ++ print(cycle) + + if show_objects: + try: +@@ -141,12 +141,12 @@ def memory_dump(show_cycles=True, show_objects=False): + except Exception: + console_width = 80 + +- print '\nGARBAGE OBJECTS:' ++ print('\nGARBAGE OBJECTS:') + for x in garbage: + s = str(x) + if len(s) > console_width-2: + s = s[:console_width-5] + '...' 
+- print '%s\n %s' % (type(x), s) ++ print('%s\n %s' % (type(x), s)) + + + gc.enable() +--- application/debug/timing.py.orig 2019-07-30 18:55:18 UTC ++++ application/debug/timing.py +@@ -27,7 +27,7 @@ import struct + import sys + + from collections import deque +-from itertools import chain, izip, takewhile ++from itertools import chain, takewhile + from time import clock, time + + from application.python.decorator import decorator, preserve_signature +@@ -37,8 +37,8 @@ from application.python.types import MarkerType + __all__ = 'Timer', 'TimeProbe', 'timer', 'time_probe', 'measure_time' + + +-class Automatic(object): +- __metaclass__ = MarkerType ++class Automatic(object, metaclass=MarkerType): ++ pass + + + class Autodetect(int): +@@ -121,11 +121,11 @@ class Timer(object): + normalized_time, time_unit = normalize_time(statement_time) + + if self.description is not None: +- format_string = u'{} loops, best of {}: {:.{precision}g} {} per loop ({:.{rate_precision}f} operations/sec); {description}' ++ format_string = '{} loops, best of {}: {:.{precision}g} {} per loop ({:.{rate_precision}f} operations/sec); {description}' + else: +- format_string = u'{} loops, best of {}: {:.{precision}g} {} per loop ({:.{rate_precision}f} operations/sec)' ++ format_string = '{} loops, best of {}: {:.{precision}g} {} per loop ({:.{rate_precision}f} operations/sec)' + rate_precision = 2 if statement_rate < 10 else 1 if statement_rate < 100 else 0 +- print format_string.format(loops, self.repeat, normalized_time, time_unit, statement_rate, description=self.description, precision=3, rate_precision=rate_precision) ++ print(format_string.format(loops, self.repeat, normalized_time, time_unit, statement_rate, description=self.description, precision=3, rate_precision=rate_precision)) + finally: + del parent + finally: +@@ -245,7 +245,7 @@ class Timer(object): + byte_increments.appendleft(len(loop_header)) + line_increments.appendleft(1) + +- line_numbers_table = 
bytes(bytearray(chain.from_iterable(takewhile(WithinCodeRange(len(loop_header + code_bytes)), izip(byte_increments, line_increments))))) ++ line_numbers_table = bytes(bytearray(chain.from_iterable(takewhile(WithinCodeRange(len(loop_header + code_bytes)), zip(byte_increments, line_increments))))) + + return code(o_code.co_argcount, o_code.co_nlocals, o_code.co_stacksize, o_code.co_flags, new_code_bytes, code_constants, names, o_code.co_varnames, + o_code.co_filename, o_code.co_name, o_code.co_firstlineno + line_offset - 1, line_numbers_table, o_code.co_freevars, o_code.co_cellvars) +@@ -312,10 +312,10 @@ class TimeProbe(object): + error_string = '' + if self.description is not None: + # format_string = u'{:.{precision}g} {}{}; {description}' +- format_string = u'{description}: {:.{precision}g} {}{}' ++ format_string = '{description}: {:.{precision}g} {}{}' + else: +- format_string = u'{:.{precision}g} {}{}' +- print format_string.format(normalized_time, time_unit, error_string, description=self.description, precision=3) ++ format_string = '{:.{precision}g} {}{}' ++ print(format_string.format(normalized_time, time_unit, error_string, description=self.description, precision=3)) + del self._start_time + + time_probe = TimeProbe +@@ -357,7 +357,7 @@ class _MeasurementProbe(object): + gc_enabled = gc.isenabled() + gc.disable() + try: +- return _MeasurementSamples(self.get_sample() for _ in xrange(iterations)) ++ return _MeasurementSamples(self.get_sample() for _ in range(iterations)) + finally: + if gc_enabled: + gc.enable() +--- application/log/__init__.py.orig 2020-03-02 11:53:05 UTC ++++ application/log/__init__.py +@@ -145,9 +145,7 @@ logging.Logger.exception = Logger.exception.__func__ + logging.exception = exception + + +-class ContextualLogger(object): +- __metaclass__ = abc.ABCMeta +- ++class ContextualLogger(object, metaclass=abc.ABCMeta): + def __init__(self, logger, **context): + self.logger = logger + self.__dict__.update(context) +@@ -239,7 +237,7 @@ class 
LevelHandler(object): + + @property + def named_levels(self): +- return {self.NOTSET, self.DEBUG, self.INFO, self.WARNING, self.ERROR, self.CRITICAL} | {item for item in self.__dict__.values() if isinstance(item, NamedLevel)} ++ return {self.NOTSET, self.DEBUG, self.INFO, self.WARNING, self.ERROR, self.CRITICAL} | {item for item in list(self.__dict__.values()) if isinstance(item, NamedLevel)} + + def __setattr__(self, name, value): + if isinstance(value, NamedLevel) and value not in self.named_levels: +@@ -273,7 +271,7 @@ class SyslogHandler(logging.Handler): + try: + priority = self.priority_map.get(record.levelno, syslog.LOG_INFO) + message = self.format(record) +- if isinstance(message, unicode): ++ if isinstance(message, str): + message = message.encode('UTF-8') + for line in message.rstrip().replace('\0', '#000').split('\n'): # syslog.syslog() raises TypeError if null bytes are present in the message + syslog.syslog(priority, line) +@@ -322,7 +320,7 @@ class StandardIOLogger(io.IOBase): + + def write(self, string): + self._checkClosed() +- if isinstance(string, unicode): ++ if isinstance(string, str): + string = string.encode(self._encoding) + lines = (self._buffer + string).split('\n') + self._buffer = lines[-1] +@@ -332,7 +330,7 @@ class StandardIOLogger(io.IOBase): + def writelines(self, lines): + self._checkClosed() + for line in lines: +- if isinstance(line, unicode): ++ if isinstance(line, str): + line = line.encode(self._encoding) + self._logger(line) + +@@ -340,7 +338,7 @@ class StandardIOLogger(io.IOBase): + class WhenNotInteractive(object): + """True when running under a non-interactive interpreter and False otherwise""" + +- def __nonzero__(self): ++ def __bool__(self): + return hasattr(__main__, '__file__') or getattr(sys, 'frozen', False) + + def __repr__(self): +--- application/log/extensions/twisted/__init__.py.orig 2017-06-25 16:08:39 UTC ++++ application/log/extensions/twisted/__init__.py +@@ -1,5 +1,5 @@ + +-from __future__ import 
absolute_import ++ + + import os + import sys +--- application/log/extensions/twisted/twisted.py.orig 2017-06-24 11:41:15 UTC ++++ application/log/extensions/twisted/twisted.py +@@ -1,5 +1,5 @@ + +-from __future__ import absolute_import ++ + + import os + import sys +--- application/notification.py.orig 2019-07-30 18:57:40 UTC ++++ application/notification.py +@@ -17,14 +17,12 @@ from application.python.weakref import weakobjectmap + __all__ = 'Any', 'UnknownSender', 'IObserver', 'NotificationData', 'Notification', 'NotificationCenter', 'ObserverWeakrefProxy' + + +-class Any(object): ++class Any(object, metaclass=MarkerType): + """Any sender or notification name""" +- __metaclass__ = MarkerType + + +-class UnknownSender(object): ++class UnknownSender(object, metaclass=MarkerType): + """A special sender used for anonymous notifications""" +- __metaclass__ = MarkerType + + + class IObserver(Interface): +@@ -61,7 +59,7 @@ class ObserverWeakrefProxy(object): + # noinspection PyUnusedLocal + def cleanup(self, ref): + # remove all observer's remaining registrations (the ones that the observer didn't remove itself) +- for notification_center in NotificationCenter.__instances__.itervalues(): ++ for notification_center in NotificationCenter.__instances__.values(): + notification_center.purge_observer(self) + + def handle_notification(self, notification): +@@ -77,7 +75,7 @@ class NotificationData(object): + self.__dict__.update(kwargs) + + def __repr__(self): +- return '%s(%s)' % (self.__class__.__name__, ', '.join('%s=%r' % (name, value) for name, value in self.__dict__.iteritems())) ++ return '%s(%s)' % (self.__class__.__name__, ', '.join('%s=%r' % (name, value) for name, value in self.__dict__.items())) + + + class Notification(object): +@@ -103,15 +101,13 @@ class Notification(object): + return '%s(%r, %r, %r)' % (self.__class__.__name__, self.name, self.sender, self.data) + + +-class NotificationCenter(object): ++class NotificationCenter(object, metaclass=Singleton): + 
""" + A NotificationCenter allows observers to subscribe to receive notifications + identified by name and sender and will distribute the posted notifications + according to those subscriptions. + """ + +- __metaclass__ = Singleton +- + queue = ThreadLocal(deque) + + def __init__(self, name='default'): +@@ -178,7 +174,7 @@ class NotificationCenter(object): + def purge_observer(self, observer): + """Remove all the observer's subscriptions.""" + with self.lock: +- subscriptions = [(key, observer_set) for key, observer_set in self.observers.iteritems() if observer in observer_set] ++ subscriptions = [(key, observer_set) for key, observer_set in self.observers.items() if observer in observer_set] + for key, observer_set in subscriptions: + observer_set.remove(observer) + if not observer_set: +--- application/process.py.orig 2019-08-20 09:13:31 UTC ++++ application/process.py +@@ -126,11 +126,9 @@ class RuntimeSettings(object): + raise ProcessError('lacking permissions to access the runtime directory at %s' % directory) + + +-class Process(object): ++class Process(object, metaclass=Singleton): + """Control how the current process runs and interacts with the operating system""" + +- __metaclass__ = Singleton +- + def __init__(self): + self._daemon = False + self._pidfile = None +@@ -290,10 +288,8 @@ class Process(object): + raise RuntimeError('Network is not available after waiting for {} seconds'.format(wait_time)) + + +-class Signals(object): ++class Signals(object, metaclass=Singleton): + """Interface to the system signals""" +- +- __metaclass__ = Singleton + + def __init__(self): + self._handlers = {} +--- application/python/__init__.py.orig 2019-06-03 16:59:08 UTC ++++ application/python/__init__.py +@@ -1,7 +1,7 @@ + + """Python language extensions""" + +-from __builtin__ import min as minimum, max as maximum ++from builtins import min as minimum, max as maximum + from application.python.types import NullType + + +--- application/python/decorator.py.orig 2020-03-12 
00:03:13 UTC ++++ application/python/decorator.py +@@ -20,7 +20,7 @@ def preserve_signature(func): + def fix_signature(wrapper): + exec_scope = {} + parameters = formatargspec(*getargspec(func), formatvalue=lambda value: '') +- exec 'def {0}{1}: return wrapper{1}'.format(func.__name__, parameters) in {'wrapper': wrapper}, exec_scope # can't use tuple form here (see https://bugs.python.org/issue21591) ++ exec('def {0}{1}: return wrapper{1}'.format(func.__name__, parameters), {'wrapper': wrapper}, exec_scope) # can't use tuple form here (see https://bugs.python.org/issue21591) + new_wrapper = exec_scope.pop(func.__name__) + new_wrapper.__name__ = func.__name__ + new_wrapper.__doc__ = func.__doc__ +@@ -50,12 +50,12 @@ def execute_once(func): + def __call__(self, *args, **kw): + with self.im_func_wrapper.lock: + method = self.__method__ +- check_arguments.__get__(method.im_self, method.im_class)(*args, **kw) +- instance = method.im_self if method.im_self is not None else args[0] ++ check_arguments.__get__(method.__self__, method.__self__.__class__)(*args, **kw) ++ instance = method.__self__ if method.__self__ is not None else args[0] + if self.im_func_wrapper.__callmap__.get(instance, False): + return + self.im_func_wrapper.__callmap__[instance] = True +- self.im_func_wrapper.__callmap__[method.im_class] = True ++ self.im_func_wrapper.__callmap__[method.__self__.__class__] = True + return method.__call__(*args, **kw) + + def __dir__(self): +@@ -85,7 +85,7 @@ def execute_once(func): + + @property + def called(self): +- return self.im_func_wrapper.__callmap__.get(self.__method__.im_self if self.__method__.im_self is not None else self.__method__.im_class, False) ++ return self.im_func_wrapper.__callmap__.get(self.__method__.__self__ if self.__method__.__self__ is not None else self.__method__.__self__.__class__, False) + + @property + def lock(self): +--- application/python/queue.py.orig 2019-07-30 10:38:20 UTC ++++ application/python/queue.py +@@ -1,7 +1,7 @@ + + 
"""Event processing queues, that process the events in a distinct thread""" + +-import Queue ++import queue + from threading import Thread, Event, Lock + + from application import log +@@ -13,9 +13,9 @@ __all__ = 'EventQueue', 'CumulativeEventQueue' + + # Special events that control the queue operation (for internal use) + +-class StopProcessing: __metaclass__ = MarkerType +-class ProcessEvents: __metaclass__ = MarkerType +-class DiscardEvents: __metaclass__ = MarkerType ++class StopProcessing(metaclass=MarkerType): pass ++class ProcessEvents(metaclass=MarkerType): pass ++class DiscardEvents(metaclass=MarkerType): pass + + + class EventQueue(Thread): +@@ -31,7 +31,7 @@ class EventQueue(Thread): + self._pause_counter = 0 + self._pause_lock = Lock() + self._accepting_events = True +- self.queue = Queue.Queue() ++ self.queue = queue.Queue() + self.handle = handler + self.load(preload) + self._active.set() +@@ -106,7 +106,7 @@ class EventQueue(Thread): + try: + while True: + self.queue.get_nowait() +- except Queue.Empty: ++ except queue.Empty: + pass + self.unpause() + +@@ -120,7 +120,7 @@ class EventQueue(Thread): + event = self.queue.get_nowait() + if event is not StopProcessing: + unhandled.append(event) +- except Queue.Empty: ++ except queue.Empty: + pass + return unhandled + +--- application/python/threadpool.py.orig 2019-07-30 10:28:55 UTC ++++ application/python/threadpool.py +@@ -1,7 +1,7 @@ + + """A generic, resizable thread pool""" + +-from Queue import Queue ++from queue import Queue + from itertools import count + from threading import Lock, Thread, current_thread + +--- application/python/types.py.orig 2017-06-27 15:56:11 UTC ++++ application/python/types.py +@@ -1,8 +1,8 @@ + + """Types and meta classes""" + +-from __future__ import absolute_import + ++ + from types import FunctionType, UnboundMethodType + from application.python.decorator import preserve_signature + +@@ -26,7 +26,7 @@ class Singleton(type): + # noinspection PyShadowingNames + 
@preserve_signature(initializer) + def instance_creator(cls, *args, **kw): +- key = (args, tuple(sorted(kw.iteritems()))) ++ key = (args, tuple(sorted(kw.items()))) + try: + hash(key) + except TypeError: +@@ -53,10 +53,8 @@ class NullTypeMeta(type): + return cls.__instance__ + + +-class NullType(object): ++class NullType(object, metaclass=NullTypeMeta): + """Instances of this class always and reliably "do nothing".""" +- +- __metaclass__ = NullTypeMeta + __name__ = 'Null' + + def __init__(self, *args, **kw): +@@ -77,7 +75,7 @@ class NullType(object): + def __len__(self): + return 0 + +- def __nonzero__(self): ++ def __bool__(self): + return False + + def __eq__(self, other): +@@ -125,7 +123,7 @@ class NullType(object): + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + raise StopIteration + + +@@ -140,5 +138,5 @@ class MarkerType(type): + def __repr__(cls): + return cls.__name__ + +- def __nonzero__(cls): ++ def __bool__(cls): + return cls.__boolean__ +--- application/python/weakref.py.orig 2017-06-27 16:26:38 UTC ++++ application/python/weakref.py +@@ -1,6 +1,6 @@ + +-from __future__ import absolute_import + ++ + import weakref + + from collections import MutableMapping, deque +@@ -21,7 +21,7 @@ class objectref(weakref.ref): + + class weakobjectid(long): + def __new__(cls, object, discard_callback): +- instance = long.__new__(cls, id(object)) ++ instance = int.__new__(cls, id(object)) + instance.ref = objectref(object, discard_callback) + return instance + +@@ -72,7 +72,7 @@ class weakobjectmap(MutableMapping): + return id(key) in self.__data__ + + def __iter__(self): +- return self.iterkeys() ++ return iter(self.keys()) + + def __len__(self): + return len(self.__data__) +@@ -84,14 +84,14 @@ class weakobjectmap(MutableMapping): + return self.__class__(self) + + def __deepcopy__(self, memo): +- return self.__class__((key, deepcopy(value, memo)) for key, value in self.iteritems()) ++ return self.__class__((key, deepcopy(value, memo)) 
for key, value in self.items()) + + def __repr__(self): + with _ReprGuard(self) as guard: + if guard.successive_run: + return '%s({...})' % self.__class__.__name__ + else: +- return '%s({%s})' % (self.__class__.__name__, ', '.join(('%r: %r' % (key, value) for key, value in self.iteritems()))) ++ return '%s({%s})' % (self.__class__.__name__, ', '.join(('%r: %r' % (key, value) for key, value in self.items()))) + + @classmethod + def fromkeys(cls, iterable, value=None): +@@ -107,22 +107,22 @@ class weakobjectmap(MutableMapping): + return self.__class__(self) + + def iterkeys(self): +- return (key for key in (key.ref() for key in self.__data__.keys()) if key is not None) ++ return (key for key in (key.ref() for key in list(self.__data__.keys())) if key is not None) + + def itervalues(self): +- return (value for key, value in ((key.ref(), value) for key, value in self.__data__.items()) if key is not None) ++ return (value for key, value in ((key.ref(), value) for key, value in list(self.__data__.items())) if key is not None) + + def iteritems(self): +- return ((key, value) for key, value in ((key.ref(), value) for key, value in self.__data__.items()) if key is not None) ++ return ((key, value) for key, value in ((key.ref(), value) for key, value in list(self.__data__.items())) if key is not None) + + def keys(self): +- return [key for key in (key.ref() for key in self.__data__.keys()) if key is not None] ++ return [key for key in (key.ref() for key in list(self.__data__.keys())) if key is not None] + + def values(self): +- return [value for key, value in ((key.ref(), value) for key, value in self.__data__.items()) if key is not None] ++ return [value for key, value in ((key.ref(), value) for key, value in list(self.__data__.items())) if key is not None] + + def items(self): +- return [(key, value) for key, value in ((key.ref(), value) for key, value in self.__data__.items()) if key is not None] ++ return [(key, value) for key, value in ((key.ref(), value) for key, value 
in list(self.__data__.items())) if key is not None] + + def has_key(self, key): + return key in self +--- application/system.py.orig 2019-08-14 12:54:53 UTC ++++ application/system.py +@@ -13,11 +13,9 @@ __all__ = 'host', 'makedirs', 'openfile', 'unlink', 'F + + # System properties and attributes + +-class HostProperties(object): ++class HostProperties(object, metaclass=Singleton): + """Host specific properties""" + +- __metaclass__ = Singleton +- + @staticmethod + def outgoing_ip_for(destination): + try: +@@ -67,7 +65,7 @@ def makedirs(path, mode=0o777): + """Create a directory recursively and ignore error if it already exists""" + try: + os.makedirs(path, mode) +- except OSError, e: ++ except OSError as e: + if e.errno == errno.EEXIST and os.path.isdir(path) and os.access(path, os.R_OK | os.W_OK | os.X_OK): + return + raise +--- application/version.py.orig 2019-07-30 18:58:01 UTC ++++ application/version.py +@@ -23,10 +23,10 @@ class Version(str): + if extraversion is None: + instance = str.__new__(cls, '%d.%d.%d' % (major, minor, micro)) + weight = 0 +- elif isinstance(extraversion, (int, long)): ++ elif isinstance(extraversion, int): + instance = str.__new__(cls, '%d.%d.%d-%d' % (major, minor, micro, extraversion)) + weight = 0 +- elif isinstance(extraversion, basestring): ++ elif isinstance(extraversion, str): + instance = str.__new__(cls, '%d.%d.%d%s' % (major, minor, micro, extraversion)) + match = re.match(r'^[-.]?(?P(pre|rc|alpha|beta|))(?P\d+)$', extraversion) + if match: +@@ -48,7 +48,7 @@ class Version(str): + def parse(cls, value): + if isinstance(value, Version): + return value +- elif not isinstance(value, basestring): ++ elif not isinstance(value, str): + raise TypeError('value should be a string') + if value == 'undefined': + return cls(None, None, None) +@@ -83,7 +83,7 @@ class Version(str): + def __cmp__(self, other): + if isinstance(other, Version): + return cmp(self._version_info, other._version_info) +- elif isinstance(other, basestring): ++ 
elif isinstance(other, str): + return cmp(str(self), other) + else: + return NotImplemented +--- examples/config.py.orig 2020-02-07 16:34:27 UTC ++++ examples/config.py +@@ -10,9 +10,9 @@ from application.system import host + class Priority(int): + """A numeric priority level. The keywords High, Normal and Low map to certain numeric values.""" + def __new__(cls, value): +- if isinstance(value, (int, long)): ++ if isinstance(value, int): + return int(value) +- elif isinstance(value, basestring): ++ elif isinstance(value, str): + priority_map = {'high': 10, 'normal': 50, 'low': 100} + try: + return priority_map.get(value.lower()) or int(value) +@@ -49,11 +49,11 @@ class StorageConfig(ConfigSection): + + + # Dump the default hardcoded values of the options defined above +-print "Settings before reading the configuration file (default hardcoded values)\n" +-print NetworkConfig +-print +-print StorageConfig +-print ++print("Settings before reading the configuration file (default hardcoded values)\n") ++print(NetworkConfig) ++print() ++print(StorageConfig) ++print() + + # Read the settings from the configuration file into the attributes of our + # configuration classes. 
The read function takes a configuration file name +@@ -86,11 +86,11 @@ NetworkConfig.read('config.ini', 'Network') + StorageConfig.read('config.ini', 'Storage') + + # Dump the values of the options after they were loaded from the config file +-print "\nSettings after reading the configuration file(s)\n" +-print NetworkConfig +-print +-print StorageConfig +-print ++print("\nSettings after reading the configuration file(s)\n") ++print(NetworkConfig) ++print() ++print(StorageConfig) ++print() + + # Configuration options can be accessed as class attributes + ip = NetworkConfig.ip +@@ -102,8 +102,8 @@ ip = NetworkConfig.ip + + # Here is an example of such a class that will be automatically loaded + +-print "\n------------------------------------\n" +-print "Using __cfgfile__ and __section__ to automatically load sections\n" ++print("\n------------------------------------\n") ++print("Using __cfgfile__ and __section__ to automatically load sections\n") + + + class AutoNetworkConfig(ConfigSection): +@@ -126,12 +126,12 @@ class AutoStorageConfig(ConfigSection): + + + # Dump the values of the options after they were loaded from the config file +-print "Settings in the automatically loaded sections\n" +-print +-print AutoNetworkConfig +-print +-print AutoStorageConfig +-print ++print("Settings in the automatically loaded sections\n") ++print() ++print(AutoNetworkConfig) ++print() ++print(AutoStorageConfig) ++print() + + # We can also get individual settings from a given section. + # +@@ -141,10 +141,10 @@ print + # above with the ConfigSection.read() method) apply here as well. 
+ # + +-print "\n------------------------------------\n" +-print "Reading individual settings from sections without using ConfigSection" ++print("\n------------------------------------\n") ++print("Reading individual settings from sections without using ConfigSection") + + configuration = ConfigFile('config.ini') + + dburi = configuration.get_setting('Storage', 'dburi', type=str, default='undefined') +-print "\nGot dburi directly from Storage section as `%s'\n" % dburi ++print("\nGot dburi directly from Storage section as `%s'\n" % dburi) +--- examples/debug.py.orig 2020-02-07 16:34:01 UTC ++++ examples/debug.py +@@ -10,10 +10,10 @@ s1 = 'abcdef' + s2 = 'ghijkl' + s3 = 'mnopqr' + +-print "" +-print "Timing different methods of adding strings" +-print "------------------------------------------" +-print "" ++print("") ++print("Timing different methods of adding strings") ++print("------------------------------------------") ++print("") + + # the loop count can be explicitly specified, but it's easier to let the + # timer automatically detect the loop count that will keep the total runtime +@@ -44,15 +44,15 @@ class C2(object): + + from application.debug.memory import * + +-print "" +-print "Debugging memory leaks" +-print "----------------------" +-print "" ++print("") ++print("Debugging memory leaks") ++print("----------------------") ++print("") + + a = C1() + del a + +-print "This will reveal no memory references" ++print("This will reveal no memory references") + memory_dump() + + a = C1() +@@ -61,7 +61,7 @@ a.b = b + b.a = a + del a, b + +-print "\n\nThis will reveal a collectable circular reference" ++print("\n\nThis will reveal a collectable circular reference") + memory_dump() + + a = C2() +@@ -70,5 +70,5 @@ a.b = b + b.a = a + del a, b + +-print "\n\nThis will reveal an uncollectable circular reference (mem leak)" ++print("\n\nThis will reveal an uncollectable circular reference (mem leak)") + memory_dump() +--- examples/notification.py.orig 2020-02-07 
16:34:21 UTC ++++ examples/notification.py +@@ -8,11 +8,11 @@ from application.notification import IObserver, Notifi + class Sender(object): + def publish(self): + center = NotificationCenter() +- print "Sending notification with name 'simple':" +- print "Expecting CatchAllObserver, SimpleObserver, ObjectObserver and VolatileAllObserver to receive notifications" ++ print("Sending notification with name 'simple':") ++ print("Expecting CatchAllObserver, SimpleObserver, ObjectObserver and VolatileAllObserver to receive notifications") + center.post_notification(name='simple', sender=self) +- print "\nSending notification with name 'complex':" +- print "Expecting CatchAllObserver, ObjectObserver and VolatileAllObserver to receive notifications" ++ print("\nSending notification with name 'complex':") ++ print("Expecting CatchAllObserver, ObjectObserver and VolatileAllObserver to receive notifications") + center.post_notification(name='complex', sender=self, data=NotificationData(timestamp=time(), complex_attribute='complex_value')) + + def __repr__(self): +@@ -22,11 +22,11 @@ class Sender(object): + class AnonymousSender(Sender): + def publish(self): + center = NotificationCenter() +- print "Sending notification with name 'simple':" +- print "Expecting SimpleObserver to receive notifications (CatchAllObserver and VolatileAllObserver have been unregistered)" ++ print("Sending notification with name 'simple':") ++ print("Expecting SimpleObserver to receive notifications (CatchAllObserver and VolatileAllObserver have been unregistered)") + center.post_notification(name='simple') +- print "\nSending notification with name 'empty':" +- print "Expecting no observer to receive notifications (CatchAllObserver and VolatileAllObserver have been unregistered)" ++ print("\nSending notification with name 'empty':") ++ print("Expecting no observer to receive notifications (CatchAllObserver and VolatileAllObserver have been unregistered)") + center.post_notification(name='empty', 
data=None) + + +@@ -35,15 +35,15 @@ class CatchAllObserver(object): + implements(IObserver) + + def register(self): +- print "Registering CatchAllObserver to receive all notifications" ++ print("Registering CatchAllObserver to receive all notifications") + NotificationCenter().add_observer(self) + + def unregister(self): +- print "Unregistering CatchAllObserver from receiving all notifications" ++ print("Unregistering CatchAllObserver from receiving all notifications") + NotificationCenter().remove_observer(self) + + def handle_notification(self, notification): +- print "In CatchAllObserver got %r" % (notification,) ++ print("In CatchAllObserver got %r" % (notification,)) + + + class SimpleObserver(object): +@@ -51,15 +51,15 @@ class SimpleObserver(object): + implements(IObserver) + + def register(self): +- print "Registering SimpleObserver to receive notifications with name 'simple' from any sender" ++ print("Registering SimpleObserver to receive notifications with name 'simple' from any sender") + NotificationCenter().add_observer(self, name='simple') + + def unregister(self): +- print "Unregistering SimpleObserver from receiving notifications with name 'simple' from any sender" ++ print("Unregistering SimpleObserver from receiving notifications with name 'simple' from any sender") + NotificationCenter().remove_observer(self, name='simple') + + def handle_notification(self, notification): +- print "In SimpleObserver got %r" % (notification,) ++ print("In SimpleObserver got %r" % (notification,)) + + + class ObjectObserver(object): +@@ -70,15 +70,15 @@ class ObjectObserver(object): + self.sender = sender + + def register(self): +- print "Registering ObjectObserver to receive notifications with any name from sender %r" % (self.sender,) ++ print("Registering ObjectObserver to receive notifications with any name from sender %r" % (self.sender,)) + NotificationCenter().add_observer(self, sender=self.sender) + + def unregister(self): +- print "Unregistering 
ObjectObserver from receiving notifications with any name from sender %r" % (self.sender,) ++ print("Unregistering ObjectObserver from receiving notifications with any name from sender %r" % (self.sender,)) + NotificationCenter().remove_observer(self, sender=self.sender) + + def handle_notification(self, notification): +- print "In ObjectObserver got %r" % (notification,) ++ print("In ObjectObserver got %r" % (notification,)) + + + class VolatileAllObserver(object): +@@ -86,11 +86,11 @@ class VolatileAllObserver(object): + implements(IObserver) + + def __init__(self): +- print "Registering VolatileAllObserver to receive all notifications" ++ print("Registering VolatileAllObserver to receive all notifications") + NotificationCenter().add_observer(ObserverWeakrefProxy(self)) + + def handle_notification(self, notification): +- print "In VolatileAllObserver got %r" % (notification,) ++ print("In VolatileAllObserver got %r" % (notification,)) + + + # instantiate senders +@@ -98,7 +98,7 @@ sender = Sender() + anonymous = AnonymousSender() + + # instantiate the observers and register them +-print "Creating and registering observers:" ++print("Creating and registering observers:") + catchall_observer = CatchAllObserver() + catchall_observer.register() + simple_observer = SimpleObserver() +@@ -108,15 +108,15 @@ object_observer.register() + volatile_observer = VolatileAllObserver() + + # send notifications +-print "\nSending notifications from Sender:" +-print "----------------------------------" ++print("\nSending notifications from Sender:") ++print("----------------------------------") + sender.publish() + +-print "\nUnregistering some observers:" ++print("\nUnregistering some observers:") + catchall_observer.unregister() +-print "Deleting VolatileAllObserver which will automatically unregister it from receiving all notifications" ++print("Deleting VolatileAllObserver which will automatically unregister it from receiving all notifications") + del volatile_observer + 
+-print "\nSending notifications from AnonymousSender:" +-print "-------------------------------------------" ++print("\nSending notifications from AnonymousSender:") ++print("-------------------------------------------") + anonymous.publish() +--- examples/singleton.py.orig 2020-02-07 16:34:14 UTC ++++ examples/singleton.py +@@ -3,14 +3,12 @@ + from application.python.types import Singleton + + +-class Unique(object): ++class Unique(object, metaclass=Singleton): + """This class has only one instance""" +- __metaclass__ = Singleton + + +-class CustomUnique(object): ++class CustomUnique(object, metaclass=Singleton): + """This class has one instance per __init__ arguments combination""" +- __metaclass__ = Singleton + + def __init__(self, name='default', value=1): + self.name = name +@@ -20,7 +18,7 @@ class CustomUnique(object): + o1 = Unique() + o2 = Unique() + +-print "o1 is o2 (expect True):", o1 is o2 ++print("o1 is o2 (expect True):", o1 is o2) + + co1 = CustomUnique() + co2 = CustomUnique() +@@ -29,8 +27,8 @@ co4 = CustomUnique(name='my name') + co5 = CustomUnique(name='my name', value=2) + co6 = CustomUnique(name='my other name') + +-print "co1 is co2 (expect True):", co1 is co2 +-print "co3 is co4 (expect True):", co3 is co4 +-print "co1 is co3 (expect False):", co1 is co3 +-print "co4 is co5 (expect False):", co4 is co5 +-print "co4 is co6 (expect False):", co4 is co6 ++print("co1 is co2 (expect True):", co1 is co2) ++print("co3 is co4 (expect True):", co3 is co4) ++print("co1 is co3 (expect False):", co1 is co3) ++print("co4 is co5 (expect False):", co4 is co5) ++print("co4 is co6 (expect False):", co4 is co6) diff --git a/devel/py-python-magic/Makefile b/devel/py-python-magic/Makefile index 549e05eb3f2..5ac1e2ccd60 100644 --- a/devel/py-python-magic/Makefile +++ b/devel/py-python-magic/Makefile @@ -1,7 +1,7 @@ # Created by: Kubilay Kocak PORTNAME= python-magic -PORTVERSION= 0.4.15 +PORTVERSION= 0.4.25 CATEGORIES= devel python PKGNAMEPREFIX= 
${PYTHON_PKGNAMEPREFIX} @@ -12,17 +12,14 @@ LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE USES= python:3.6+ -USE_GITHUB= yes # missing test data files -USE_PYTHON= autoplist distutils - -GH_ACCOUNT= ahupp - -# both install module as magic.py -CONFLICTS= ${PYTHON_PKGNAMEPREFIX}magic +USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +USE_GITHUB= yes +GH_ACCOUNT= ahupp + do-test: - @cd ${WRKSRC} && ${PYTHON_CMD} ${PYDISTUTILS_SETUP} test + cd ${WRKSRC} && ${SETENV} LC_ALL=en_US.UTF-8 PYTHONPATH=${STAGEDIR}${PYTHON_SITELIBDIR} ${PYTHON_CMD} -m unittest -v .include diff --git a/devel/py-python-magic/distinfo b/devel/py-python-magic/distinfo index b30107d31e3..7cb151dd2fa 100644 --- a/devel/py-python-magic/distinfo +++ b/devel/py-python-magic/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1555764350 -SHA256 (ahupp-python-magic-0.4.15_GH0.tar.gz) = 6d730389249ab1e34ffb0a3c5beaa44e116687ffa081e0176dab6c59ff271593 -SIZE (ahupp-python-magic-0.4.15_GH0.tar.gz) = 75476 +TIMESTAMP = 1647264572 +SHA256 (ahupp-python-magic-0.4.25_GH0.tar.gz) = 0c1f483995067ffff268103f8bb6860d2f42aa3a5a9b906eaf34bcce1de36329 +SIZE (ahupp-python-magic-0.4.25_GH0.tar.gz) = 864121 diff --git a/devel/py-python-magic/pkg-descr b/devel/py-python-magic/pkg-descr index 76d35d3e32c..dbcb91de39c 100644 --- a/devel/py-python-magic/pkg-descr +++ b/devel/py-python-magic/pkg-descr @@ -1,5 +1,6 @@ -This module uses ctypes to access the libmagic file type identification -library. It makes use of the local magic database and supports both -textual and MIME-type output. +python-magic is a Python interface to the libmagic file type identification +library. libmagic identifies file types by checking their headers according to a +predefined list of file types. This functionality is exposed to the command line +by the Unix command file. 
WWW: https://github.com/ahupp/python-magic diff --git a/devel/py-pyzipper/Makefile b/devel/py-pyzipper/Makefile index 23d4e8ff402..af20d1e26e8 100644 --- a/devel/py-pyzipper/Makefile +++ b/devel/py-pyzipper/Makefile @@ -16,4 +16,7 @@ USES= python:3.6+,run USE_PYTHON= autoplist distutils NO_ARCH= yes +post-patch: + @${RM} ${WRKSRC}/test/badsyntax_*.py + .include diff --git a/devel/py-qt5-pyqt/Makefile b/devel/py-qt5-pyqt/Makefile index ece697a18ef..db2e44c9420 100644 --- a/devel/py-qt5-pyqt/Makefile +++ b/devel/py-qt5-pyqt/Makefile @@ -1,5 +1,6 @@ PORTNAME= pyqt PORTVERSION= ${PYQT5_VERSION} +PORTREVISION= 1 CATEGORIES= devel python MASTER_SITES= ${MASTER_SITES_PYQT5} PKGNAMEPREFIX= ${PYQT_PY_RELNAME}- diff --git a/devel/py-rapidfuzz/Makefile b/devel/py-rapidfuzz/Makefile index 0f5c9ad0801..fcf4a10b6f5 100644 --- a/devel/py-rapidfuzz/Makefile +++ b/devel/py-rapidfuzz/Makefile @@ -25,5 +25,7 @@ post-install: ${STRIP_CMD} \ ${STAGEDIR}${PYTHON_SITELIBDIR}/${PORTNAME}/*.so \ ${STAGEDIR}${PYTHON_SITELIBDIR}/${PORTNAME}/distance/*.so + ${PYTHON_CMD} -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} + ${PYTHON_CMD} -O -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} .include diff --git a/devel/py-readme-renderer/Makefile b/devel/py-readme-renderer/Makefile index 1f0268edecc..ec3c0a1cce0 100644 --- a/devel/py-readme-renderer/Makefile +++ b/devel/py-readme-renderer/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= readme-renderer -PORTVERSION= 32.0 +PORTVERSION= 34.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -25,6 +25,6 @@ NO_ARCH= yes OPTIONS_DEFINE= MD MD_DESC= Markdown support -MD_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}cmarkgfm>=0.5.0<0.7.0:textproc/py-cmarkgfm@${PY_FLAVOR} +MD_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}cmarkgfm>=0.8.0:textproc/py-cmarkgfm@${PY_FLAVOR} .include diff --git a/devel/py-readme-renderer/distinfo b/devel/py-readme-renderer/distinfo index 
089169d5044..054f790915a 100644 --- a/devel/py-readme-renderer/distinfo +++ b/devel/py-readme-renderer/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641045968 -SHA256 (readme_renderer-32.0.tar.gz) = b512beafa6798260c7d5af3e1b1f097e58bfcd9a575da7c4ddd5e037490a5b85 -SIZE (readme_renderer-32.0.tar.gz) = 27812 +TIMESTAMP = 1647264574 +SHA256 (readme_renderer-34.0.tar.gz) = dfb4d17f21706d145f7473e0b61ca245ba58e810cf9b2209a48239677f82e5b0 +SIZE (readme_renderer-34.0.tar.gz) = 28835 diff --git a/devel/py-responses/Makefile b/devel/py-responses/Makefile index e54faf84924..778e128bd3f 100644 --- a/devel/py-responses/Makefile +++ b/devel/py-responses/Makefile @@ -1,7 +1,7 @@ # Created by: Brendan Molloy PORTNAME= responses -PORTVERSION= 0.18.0 +PORTVERSION= 0.19.0 CATEGORIES= devel www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -19,7 +19,8 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}mypy>=0:devel/py-mypy@${PY_FLAVOR} \ TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}coverage>=6.0.0:devel/py-coverage@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}flake8>=0:devel/py-flake8@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}mypy>=0:devel/py-mypy@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pytest>=4.6:devel/py-pytest@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pytest>=7.0.0:devel/py-pytest@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pytest-asyncio>=0:devel/py-pytest-asyncio@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pytest-cov>=0:devel/py-pytest-cov@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pytest-localserver>=0:devel/py-pytest-localserver@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}types-mock>=0:devel/py-types-mock@${PY_FLAVOR} \ diff --git a/devel/py-responses/distinfo b/devel/py-responses/distinfo index 41eeb67795b..3005bdbd0a5 100644 --- a/devel/py-responses/distinfo +++ b/devel/py-responses/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058034 -SHA256 (responses-0.18.0.tar.gz) = 380cad4c1c1dc942e5e8a8eaae0b4d4edf708f4f010db8b7bcfafad1fcd254ff -SIZE (responses-0.18.0.tar.gz) = 45885 +TIMESTAMP = 1647264576 +SHA256 
(responses-0.19.0.tar.gz) = 3fc29c3117e14136b833a0a6d4e7f1217c6301bf08b6086db468e12f1e3290e2 +SIZE (responses-0.19.0.tar.gz) = 50031 diff --git a/devel/py-simpleparse/files/patch-setup.py b/devel/py-simpleparse/files/patch-setup.py new file mode 100644 index 00000000000..2d4d816a280 --- /dev/null +++ b/devel/py-simpleparse/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2020-03-07 01:30:49 UTC ++++ setup.py +@@ -29,7 +29,7 @@ if sys.platform == 'win32': + )['define'] = 'BAD_STATIC_FORWARD' + + if __name__ == "__main__": +- packages = find_packages(HERE) ++ packages = find_packages(exclude=['examples*', 'tests*']) + setup ( + name = "SimpleParse", + version = findVersion(), diff --git a/devel/py-simpletal/files/patch-2to3 b/devel/py-simpletal/files/patch-2to3 new file mode 100644 index 00000000000..5e3f954ddeb --- /dev/null +++ b/devel/py-simpletal/files/patch-2to3 @@ -0,0 +1,811 @@ +--- lib/simpletal/simpleTAL.py.orig 2010-09-21 20:02:28 UTC ++++ lib/simpletal/simpleTAL.py +@@ -37,11 +37,11 @@ + try: + import logging + except: +- import DummyLogger as logging ++ from . import DummyLogger as logging + +-import xml.sax, cgi, StringIO, codecs, re, sgmlentitynames, types ++import xml.sax, cgi, io, codecs, re, sgmlentitynames, types + import simpletal, copy, sys +-import FixedHTMLParser ++from . import FixedHTMLParser + + __version__ = simpletal.__version__ + +@@ -61,7 +61,7 @@ try: + except ImportError: + use_dom2sax = 0 + +-import simpleTALES ++from . import simpleTALES + + # Name-space URIs + METAL_NAME_URI="http://xml.zope.org/namespaces/metal" +@@ -144,9 +144,10 @@ class TemplateInterpreter: + self.commandHandler [METAL_DEFINE_SLOT] = self.cmdDefineSlot + self.commandHandler [TAL_NOOP] = self.cmdNoOp + +- def tagAsText (self, (tag,atts), singletonFlag=0): ++ def tagAsText (self, xxx_todo_changeme, singletonFlag=0): + """ This returns a tag as text. 
+ """ ++ (tag,atts) = xxx_todo_changeme + result = ["<"] + result.append (tag) + for attName, attValue in atts: +@@ -273,7 +274,7 @@ class TemplateInterpreter: + self.context.setLocal (args[0], self.repeatVariable.getCurrentValue()) + self.programCounter += 1 + return +- except IndexError, e: ++ except IndexError as e: + # We have finished the repeat + self.repeatVariable = None + self.context.removeRepeat (args[0]) +@@ -310,7 +311,7 @@ class TemplateInterpreter: + if (hasattr (result, "__iter__") and callable (result.__iter__)): + # We can get an iterator! + self.repeatVariable = simpleTALES.IteratorRepeatVariable (result.__iter__()) +- elif (hasattr (result, "next") and callable (result.next)): ++ elif (hasattr (result, "next") and callable (result.__next__)): + # Treat as an iterator + self.repeatVariable = simpleTALES.IteratorRepeatVariable (result) + else: +@@ -322,7 +323,7 @@ class TemplateInterpreter: + + try: + curValue = self.repeatVariable.getCurrentValue() +- except IndexError, e: ++ except IndexError as e: + # The iterator ran out of values before we started - treat as an empty list + self.outputTag = 0 + self.repeatVariable = None +@@ -377,20 +378,20 @@ class TemplateInterpreter: + elif (not resultVal == simpleTALES.DEFAULTVALUE): + # We have a value - let's use it! + attsToRemove [attName]=1 +- if (isinstance (resultVal, types.UnicodeType)): ++ if (isinstance (resultVal, str)): + escapedAttVal = resultVal +- elif (isinstance (resultVal, types.StringType)): ++ elif (isinstance (resultVal, bytes)): + # THIS IS NOT A BUG! + # Use Unicode in the Context object if you are not using Ascii +- escapedAttVal = unicode (resultVal, 'ascii') ++ escapedAttVal = str (resultVal, 'ascii') + else: + # THIS IS NOT A BUG! 
+ # Use Unicode in the Context object if you are not using Ascii +- escapedAttVal = unicode (resultVal) ++ escapedAttVal = str (resultVal) + newAtts.append ((attName, escapedAttVal)) + # Copy over the old attributes + for oldAttName, oldAttValue in self.currentAttributes: +- if (not attsToRemove.has_key (oldAttName)): ++ if (oldAttName not in attsToRemove): + newAtts.append ((oldAttName, oldAttValue)) + self.currentAttributes = newAtts + # Evaluate all other commands +@@ -436,27 +437,27 @@ class TemplateInterpreter: + # End of the macro expansion (if any) so clear the parameters + self.slotParameters = {} + else: +- if (isinstance (resultVal, types.UnicodeType)): ++ if (isinstance (resultVal, str)): + self.file.write (resultVal) +- elif (isinstance (resultVal, types.StringType)): ++ elif (isinstance (resultVal, bytes)): + # THIS IS NOT A BUG! + # Use Unicode in the Context object if you are not using Ascii +- self.file.write (unicode (resultVal, 'ascii')) ++ self.file.write (str (resultVal, 'ascii')) + else: + # THIS IS NOT A BUG! + # Use Unicode in the Context object if you are not using Ascii +- self.file.write (unicode (resultVal)) ++ self.file.write (str (resultVal)) + else: +- if (isinstance (resultVal, types.UnicodeType)): ++ if (isinstance (resultVal, str)): + self.file.write (cgi.escape (resultVal)) +- elif (isinstance (resultVal, types.StringType)): ++ elif (isinstance (resultVal, bytes)): + # THIS IS NOT A BUG! + # Use Unicode in the Context object if you are not using Ascii +- self.file.write (cgi.escape (unicode (resultVal, 'ascii'))) ++ self.file.write (cgi.escape (str (resultVal, 'ascii'))) + else: + # THIS IS NOT A BUG! 
+ # Use Unicode in the Context object if you are not using Ascii +- self.file.write (cgi.escape (unicode (resultVal))) ++ self.file.write (cgi.escape (str (resultVal))) + + if (self.outputTag and not args[1]): + # Do NOT output end tag if a singleton with no content +@@ -535,7 +536,7 @@ class TemplateInterpreter: + If the slotName is filled then that is used, otherwise the original conent + is used. + """ +- if (self.currentSlots.has_key (args[0])): ++ if (args[0] in self.currentSlots): + # This slot is filled, so replace us with that content + self.outputTag = 0 + self.tagContent = (1, self.currentSlots [args[0]]) +@@ -556,14 +557,15 @@ class HTMLTemplateInterpreter (TemplateInterpreter): + # Override the tagAsText method for this instance + self.tagAsText = self.tagAsTextMinimizeAtts + +- def tagAsTextMinimizeAtts (self, (tag,atts), singletonFlag=0): ++ def tagAsTextMinimizeAtts (self, xxx_todo_changeme1, singletonFlag=0): + """ This returns a tag as text. + """ ++ (tag,atts) = xxx_todo_changeme1 + result = ["<"] + result.append (tag) + upperTag = tag.upper() + for attName, attValue in atts: +- if (HTML_BOOLEAN_ATTS.has_key ('%s:%s' % (upperTag, attName.upper()))): ++ if ('%s:%s' % (upperTag, attName.upper()) in HTML_BOOLEAN_ATTS): + # We should output a minimised boolean value + result.append (' ') + result.append (attName) +@@ -587,7 +589,7 @@ class Template: + self.doctype = doctype + + # Setup the macros +- for macro in self.macros.values(): ++ for macro in list(self.macros.values()): + macro.setParentTemplate (self) + + # Setup the slots +@@ -595,7 +597,7 @@ class Template: + if (cmnd == METAL_USE_MACRO): + # Set the parent of each slot + slotMap = arg[1] +- for slot in slotMap.values(): ++ for slot in list(slotMap.values()): + slot.setParentTemplate (self) + + def expand (self, context, outputFile, outputEncoding=None, interpreter=None): +@@ -616,7 +618,7 @@ class Template: + ourInterpreter = interpreter + try: + ourInterpreter.execute (self) +- except 
UnicodeError, unierror: ++ except UnicodeError as unierror: + logging.error ("UnicodeError caused by placing a non-Unicode string in the Context object.") + raise simpleTALES.ContextContentException ("Found non-unicode string in Context!") + +@@ -632,16 +634,16 @@ class Template: + result = result + "\n[%s] %s" % (str (index), str (cmd)) + else: + result = result + "\n[%s] %s, (%s{" % (str (index), str (cmd[0]), str (cmd[1][0])) +- for slot in cmd[1][1].keys(): ++ for slot in list(cmd[1][1].keys()): + result = result + "%s: %s" % (slot, str (cmd[1][1][slot])) + result = result + "}, %s)" % str (cmd[1][2]) + index += 1 + result = result + "\n\nSymbols:\n" +- for symbol in self.symbolTable.keys(): ++ for symbol in list(self.symbolTable.keys()): + result = result + "Symbol: " + str (symbol) + " points to: " + str (self.symbolTable[symbol]) + ", which is command: " + str (self.commandList[self.symbolTable[symbol]]) + "\n" + + result = result + "\n\nMacros:\n" +- for macro in self.macros.keys(): ++ for macro in list(self.macros.keys()): + result = result + "Macro: " + str (macro) + " value of: " + str (self.macros[macro]) + return result + +@@ -794,9 +796,10 @@ class TemplateCompiler: + newPrefix = self.metal_namespace_prefix_stack.pop() + self.setMETALPrefix (newPrefix) + +- def tagAsText (self, (tag,atts), singletonFlag=0): ++ def tagAsText (self, xxx_todo_changeme2, singletonFlag=0): + """ This returns a tag as text. 
+ """ ++ (tag,atts) = xxx_todo_changeme2 + result = ["<"] + result.append (tag) + for attName, attValue in atts: +@@ -865,7 +868,7 @@ class TemplateCompiler: + popCommandList = tagProperties.get ('popFunctionList', []) + singletonTag = tagProperties.get ('singletonTag', 0) + for func in popCommandList: +- apply (func, ()) ++ func(*()) + self.log.debug ("Popped tag %s off stack" % oldTag[0]) + if (oldTag[0] == tag[0]): + # We've found the right tag, now check to see if we have any TAL commands on it +@@ -967,7 +970,7 @@ class TemplateCompiler: + else: + # It's nothing special, just an ordinary namespace declaration + cleanAttributes.append ((att, value)) +- elif (self.tal_attribute_map.has_key (commandAttName)): ++ elif (commandAttName in self.tal_attribute_map): + # It's a TAL attribute + cmnd = self.tal_attribute_map [commandAttName] + if (cmnd == TAL_OMITTAG and TALElementNameSpace): +@@ -975,7 +978,7 @@ class TemplateCompiler: + else: + foundCommandsArgs [cmnd] = value + foundTALAtts.append (cmnd) +- elif (self.metal_attribute_map.has_key (commandAttName)): ++ elif (commandAttName in self.metal_attribute_map): + # It's a METAL attribute + cmnd = self.metal_attribute_map [commandAttName] + foundCommandsArgs [cmnd] = value +@@ -1184,7 +1187,7 @@ class TemplateCompiler: + msg = "Macro name %s is invalid." % argument + self.log.error (msg) + raise TemplateParseException (self.tagAsText (self.currentStartTag), msg) +- if (self.macroMap.has_key (argument)): ++ if (argument in self.macroMap): + msg = "Macro name %s is already defined!" % argument + self.log.error (msg) + raise TemplateParseException (self.tagAsText (self.currentStartTag), msg) +@@ -1232,7 +1235,7 @@ class TemplateCompiler: + self.log.error (msg) + raise TemplateParseException (self.tagAsText (self.currentStartTag), msg) + +- if (slotMap.has_key (argument)): ++ if (argument in slotMap): + msg = "Slot %s has already been filled!" 
% argument + self.log.error (msg) + raise TemplateParseException (self.tagAsText (self.currentStartTag), msg) +@@ -1281,14 +1284,15 @@ class HTMLTemplateCompiler (TemplateCompiler, FixedHTM + self.feed (encodedFile.read()) + self.close() + +- def tagAsText (self, (tag,atts), singletonFlag=0): ++ def tagAsText (self, xxx_todo_changeme3, singletonFlag=0): + """ This returns a tag as text. + """ ++ (tag,atts) = xxx_todo_changeme3 + result = ["<"] + result.append (tag) + upperTag = tag.upper() + for attName, attValue in atts: +- if (self.minimizeBooleanAtts and HTML_BOOLEAN_ATTS.has_key ('%s:%s' % (upperTag, attName.upper()))): ++ if (self.minimizeBooleanAtts and '%s:%s' % (upperTag, attName.upper()) in HTML_BOOLEAN_ATTS): + # We should output a minimised boolean value + result.append (' ') + result.append (attName) +@@ -1306,7 +1310,7 @@ class HTMLTemplateCompiler (TemplateCompiler, FixedHTM + + def handle_startendtag (self, tag, attributes): + self.handle_starttag (tag, attributes) +- if not (HTML_FORBIDDEN_ENDTAG.has_key (tag.upper())): ++ if not (tag.upper() in HTML_FORBIDDEN_ENDTAG): + self.handle_endtag(tag) + + def handle_starttag (self, tag, attributes): +@@ -1316,7 +1320,7 @@ class HTMLTemplateCompiler (TemplateCompiler, FixedHTM + # We need to spot empty tal:omit-tags + if (attValue is None): + if (att == self.tal_namespace_omittag): +- atts.append ((att, u"")) ++ atts.append ((att, "")) + else: + atts.append ((att, att)) + else: +@@ -1334,16 +1338,16 @@ class HTMLTemplateCompiler (TemplateCompiler, FixedHTM + refValue = int (ref[3:-1], 16) + else: + refValue = int (ref[2:-1]) +- goodAttValue.append (unichr (refValue)) ++ goodAttValue.append (chr (refValue)) + else: + # A named reference. 
+- goodAttValue.append (unichr (sgmlentitynames.htmlNameToUnicodeNumber.get (ref[1:-1], 65533))) ++ goodAttValue.append (chr (sgmlentitynames.htmlNameToUnicodeNumber.get (ref[1:-1], 65533))) + last = match.end() + match = ENTITY_REF_REGEX.search (attValue, last) + goodAttValue.append (attValue [last:]) +- atts.append ((att, u"".join (goodAttValue))) ++ atts.append ((att, "".join (goodAttValue))) + +- if (HTML_FORBIDDEN_ENDTAG.has_key (tag.upper())): ++ if (tag.upper() in HTML_FORBIDDEN_ENDTAG): + # This should have no end tag, so we just do the start and suppress the end + self.parseStartTag (tag, atts) + self.log.debug ("End tag forbidden, generating close tag with no output.") +@@ -1353,7 +1357,7 @@ class HTMLTemplateCompiler (TemplateCompiler, FixedHTM + + def handle_endtag (self, tag): + self.log.debug ("Recieved End Tag: " + tag) +- if (HTML_FORBIDDEN_ENDTAG.has_key (tag.upper())): ++ if (tag.upper() in HTML_FORBIDDEN_ENDTAG): + self.log.warn ("HTML 4.01 forbids end tags for the %s element" % tag) + else: + # Normal end tag +@@ -1365,24 +1369,24 @@ class HTMLTemplateCompiler (TemplateCompiler, FixedHTM + # These two methods are required so that we expand all character and entity references prior to parsing the template. + def handle_charref (self, ref): + self.log.debug ("Got Ref: %s", ref) +- self.parseData (unichr (int (ref))) ++ self.parseData (chr (int (ref))) + + def handle_entityref (self, ref): + self.log.debug ("Got Ref: %s", ref) + # Use handle_data so that <&> are re-encoded as required. +- self.handle_data( unichr (sgmlentitynames.htmlNameToUnicodeNumber.get (ref, 65533))) ++ self.handle_data( chr (sgmlentitynames.htmlNameToUnicodeNumber.get (ref, 65533))) + + # Handle document type declarations + def handle_decl (self, data): +- self.parseData (u'' % data) ++ self.parseData ('' % data) + + # Pass comments through un-affected. 
+ def handle_comment (self, data): +- self.parseData (u'' % data) ++ self.parseData ('' % data) + + def handle_pi (self, data): + self.log.debug ("Recieved processing instruction.") +- self.parseData (u'' % data) ++ self.parseData ('' % data) + + def report_unbalanced (self, tag): + self.log.warn ("End tag %s present with no corresponding open tag.") +@@ -1442,7 +1446,7 @@ class XMLTemplateCompiler (TemplateCompiler, xml.sax.h + if (SINGLETON_XML_REGEX.match (xmlText)): + # This is a singleton! + self.singletonElement=1 +- except xml.sax.SAXException, e: ++ except xml.sax.SAXException as e: + # Parser doesn't support this property + pass + # Convert attributes into a list of tuples +@@ -1459,7 +1463,7 @@ class XMLTemplateCompiler (TemplateCompiler, xml.sax.h + + def skippedEntity (self, name): + self.log.info ("Recieved skipped entity: %s" % name) +- self.characters( unichr (sgmlentitynames.htmlNameToUnicodeNumber.get (name, 65533))) ++ self.characters( chr (sgmlentitynames.htmlNameToUnicodeNumber.get (name, 65533))) + + def characters (self, data): + #self.log.debug ("Recieved Real Data: " + data) +@@ -1468,11 +1472,11 @@ class XMLTemplateCompiler (TemplateCompiler, xml.sax.h + + def processingInstruction (self, target, data): + self.log.debug ("Recieved processing instruction.") +- self.parseData (u'' % (target, data)) ++ self.parseData ('' % (target, data)) + + def comment (self, data): + # This is only called if your XML parser supports the LexicalHandler interface. 
+- self.parseData (u'' % data) ++ self.parseData ('' % data) + + def getTemplate (self): + template = XMLTemplate (self.commandList, self.macroMap, self.symbolLocationTable, self.doctype) +@@ -1483,9 +1487,9 @@ def compileHTMLTemplate (template, inputEncoding="ISO- + To use the resulting template object call: + template.expand (context, outputFile) + """ +- if (isinstance (template, types.StringType) or isinstance (template, types.UnicodeType)): ++ if (isinstance (template, bytes) or isinstance (template, str)): + # It's a string! +- templateFile = StringIO.StringIO (template) ++ templateFile = io.StringIO (template) + else: + templateFile = template + compiler = HTMLTemplateCompiler() +@@ -1497,9 +1501,9 @@ def compileXMLTemplate (template): + To use the resulting template object call: + template.expand (context, outputFile) + """ +- if (isinstance (template, types.StringType)): ++ if (isinstance (template, bytes)): + # It's a string! +- templateFile = StringIO.StringIO (template) ++ templateFile = io.StringIO (template) + else: + templateFile = template + compiler = XMLTemplateCompiler() +--- lib/simpletal/simpleTALES.py.orig 2010-09-21 20:02:28 UTC ++++ lib/simpletal/simpleTALES.py +@@ -38,7 +38,7 @@ import types, sys + try: + import logging + except: +- import DummyLogger as logging ++ from . 
import DummyLogger as logging + + import simpletal, simpleTAL + +@@ -63,7 +63,7 @@ class ContextVariable: + + def value (self, currentPath=None): + if (callable (self.ourValue)): +- return apply (self.ourValue, ()) ++ return self.ourValue(*()) + return self.ourValue + + def rawValue (self): +@@ -190,8 +190,8 @@ class IteratorRepeatVariable (RepeatVariable): + if (self.iterStatus == 0): + self.iterStatus = 1 + try: +- self.curValue = self.sequence.next() +- except StopIteration, e: ++ self.curValue = next(self.sequence) ++ except StopIteration as e: + self.iterStatus = 2 + raise IndexError ("Repeat Finished") + return self.curValue +@@ -200,8 +200,8 @@ class IteratorRepeatVariable (RepeatVariable): + # Need this for the repeat variable functions. + self.position += 1 + try: +- self.curValue = self.sequence.next() +- except StopIteration, e: ++ self.curValue = next(self.sequence) ++ except StopIteration as e: + self.iterStatus = 2 + raise IndexError ("Repeat Finished") + +@@ -214,7 +214,7 @@ class IteratorRepeatVariable (RepeatVariable): + self.map ['start'] = self.getStart + self.map ['end'] = self.getEnd + # TODO: first and last need to be implemented. 
+- self.map ['length'] = sys.maxint ++ self.map ['length'] = sys.maxsize + self.map ['letter'] = self.getLowerLetter + self.map ['Letter'] = self.getUpperLetter + self.map ['roman'] = self.getLowerRoman +@@ -233,7 +233,7 @@ class PathFunctionVariable (ContextVariable): + def value (self, currentPath=None): + if (currentPath is not None): + index, paths = currentPath +- result = ContextVariable (apply (self.func, ('/'.join (paths[index:]),))) ++ result = ContextVariable (self.func(*('/'.join (paths[index:]),))) + # Fast track the result + raise result + +@@ -362,7 +362,7 @@ class Context: + else: + # Not specified - so it's a path + return self.evaluatePath (expr) +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + if (suppressException): + return None + raise e +@@ -374,7 +374,7 @@ class Context: + #self.log.debug ("Evaluating python expression %s" % expr) + + globals={} +- for name, value in self.globals.items(): ++ for name, value in list(self.globals.items()): + if (isinstance (value, ContextVariable)): value = value.rawValue() + globals [name] = value + globals ['path'] = self.pythonPathFuncs.path +@@ -384,7 +384,7 @@ class Context: + globals ['test'] = self.pythonPathFuncs.test + + locals={} +- for name, value in self.locals.items(): ++ for name, value in list(self.locals.items()): + if (isinstance (value, ContextVariable)): value = value.rawValue() + locals [name] = value + +@@ -393,7 +393,7 @@ class Context: + if (isinstance (result, ContextVariable)): + return result.value() + return result +- except Exception, e: ++ except Exception as e: + # An exception occured evaluating the template, return the exception as text + self.log.warn ("Exception occurred evaluating python path, exception: " + str (e)) + return "Exception: %s" % str (e) +@@ -406,7 +406,7 @@ class Context: + # Evaluate this path + try: + return self.evaluate (path.strip ()) +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + # Path didn't exist, 
try the next one + pass + # No paths evaluated - raise exception. +@@ -424,7 +424,7 @@ class Context: + try: + result = self.traversePath (allPaths[0], canCall = 0) + return self.true +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + # Look at the rest of the paths. + pass + +@@ -435,7 +435,7 @@ class Context: + # If this is part of a "exists: path1 | exists: path2" path then we need to look at the actual result. + if (pathResult): + return self.true +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + pass + # If we get this far then there are *no* paths that exist. + return self.false +@@ -446,7 +446,7 @@ class Context: + # The first path is for us + try: + return self.traversePath (allPaths[0], canCall = 0) +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + # Try the rest of the paths. + pass + +@@ -454,7 +454,7 @@ class Context: + # Evaluate this path + try: + return self.evaluate (path.strip ()) +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + pass + # No path evaluated - raise error + raise PATHNOTFOUNDEXCEPTION +@@ -465,7 +465,7 @@ class Context: + # Evaluate what I was passed + try: + pathResult = self.evaluate (expr) +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + # In SimpleTAL the result of "not: no/such/path" should be TRUE not FALSE. + return self.true + +@@ -492,7 +492,7 @@ class Context: + #self.log.debug ("Evaluating String %s" % expr) + result = "" + skipCount = 0 +- for position in xrange (0,len (expr)): ++ for position in range (0,len (expr)): + if (skipCount > 0): + skipCount -= 1 + else: +@@ -510,16 +510,16 @@ class Context: + # Evaluate the path - missing paths raise exceptions as normal. 
+ try: + pathResult = self.evaluate (path) +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + # This part of the path didn't evaluate to anything - leave blank +- pathResult = u'' ++ pathResult = '' + if (pathResult is not None): +- if (isinstance (pathResult, types.UnicodeType)): ++ if (isinstance (pathResult, str)): + result += pathResult + else: + # THIS IS NOT A BUG! + # Use Unicode in Context if you aren't using Ascii! +- result += unicode (pathResult) ++ result += str (pathResult) + skipCount = endPos - position + else: + # It's a variable +@@ -530,18 +530,18 @@ class Context: + # Evaluate the variable - missing paths raise exceptions as normal. + try: + pathResult = self.traversePath (path) +- except PathNotFoundException, e: ++ except PathNotFoundException as e: + # This part of the path didn't evaluate to anything - leave blank +- pathResult = u'' ++ pathResult = '' + if (pathResult is not None): +- if (isinstance (pathResult, types.UnicodeType)): ++ if (isinstance (pathResult, str)): + result += pathResult + else: + # THIS IS NOT A BUG! + # Use Unicode in Context if you aren't using Ascii! 
+- result += unicode (pathResult) ++ result += str (pathResult) + skipCount = endPos - position - 1 +- except IndexError, e: ++ except IndexError as e: + # Trailing $ sign - just suppress it + self.log.warn ("Trailing $ detected") + pass +@@ -564,19 +564,19 @@ class Context: + path = pathList[0] + if path.startswith ('?'): + path = path[1:] +- if self.locals.has_key(path): ++ if path in self.locals: + path = self.locals[path] + if (isinstance (path, ContextVariable)): path = path.value() +- elif (callable (path)):path = apply (path, ()) ++ elif (callable (path)):path = path(*()) + +- elif self.globals.has_key(path): ++ elif path in self.globals: + path = self.globals[path] + if (isinstance (path, ContextVariable)): path = path.value() +- elif (callable (path)):path = apply (path, ()) ++ elif (callable (path)):path = path(*()) + #self.log.debug ("Dereferenced to %s" % path) +- if self.locals.has_key(path): ++ if path in self.locals: + val = self.locals[path] +- elif self.globals.has_key(path): ++ elif path in self.globals: + val = self.globals[path] + else: + # If we can't find it then raise an exception +@@ -586,20 +586,20 @@ class Context: + #self.log.debug ("Looking for path element %s" % path) + if path.startswith ('?'): + path = path[1:] +- if self.locals.has_key(path): ++ if path in self.locals: + path = self.locals[path] + if (isinstance (path, ContextVariable)): path = path.value() +- elif (callable (path)):path = apply (path, ()) +- elif self.globals.has_key(path): ++ elif (callable (path)):path = path(*()) ++ elif path in self.globals: + path = self.globals[path] + if (isinstance (path, ContextVariable)): path = path.value() +- elif (callable (path)):path = apply (path, ()) ++ elif (callable (path)):path = path(*()) + #self.log.debug ("Dereferenced to %s" % path) + try: + if (isinstance (val, ContextVariable)): temp = val.value((index,pathList)) +- elif (callable (val)):temp = apply (val, ()) ++ elif (callable (val)):temp = val(*()) + else: temp = val +- 
except ContextVariable, e: ++ except ContextVariable as e: + # Fast path for those functions that return values + return e.value() + +@@ -619,9 +619,9 @@ class Context: + if (canCall): + try: + if (isinstance (val, ContextVariable)): result = val.value((index,pathList)) +- elif (callable (val)):result = apply (val, ()) ++ elif (callable (val)):result = val(*()) + else: result = val +- except ContextVariable, e: ++ except ContextVariable as e: + # Fast path for those functions that return values + return e.value() + else: +@@ -643,7 +643,7 @@ class Context: + vars['attrs'] = None + + # Add all of these to the global context +- for name in vars.keys(): ++ for name in list(vars.keys()): + self.addGlobal (name,vars[name]) + + # Add also under CONTEXTS +--- lib/simpletal/simpleTALUtils.py.orig 2010-09-21 20:02:28 UTC ++++ lib/simpletal/simpleTALUtils.py +@@ -34,7 +34,7 @@ + Module Dependencies: None + """ + +-import StringIO, os, stat, threading, sys, codecs, sgmllib, cgi, re, types ++import io, os, stat, threading, sys, codecs, sgmllib, cgi, re, types + import simpletal, simpleTAL + + __version__ = simpletal.__version__ +@@ -56,12 +56,12 @@ class HTMLStructureCleaner (sgmllib.SGMLParser): + The method returns a unicode string which is suitable for addition to a + simpleTALES.Context object. 
+ """ +- if (isinstance (content, types.StringType)): ++ if (isinstance (content, bytes)): + # Not unicode, convert + converter = codecs.lookup (encoding)[1] +- file = StringIO.StringIO (converter (content)[0]) +- elif (isinstance (content, types.UnicodeType)): +- file = StringIO.StringIO (content) ++ file = io.StringIO (converter (content)[0]) ++ elif (isinstance (content, str)): ++ file = io.StringIO (content) + else: + # Treat it as a file type object - and convert it if we have an encoding + if (encoding is not None): +@@ -70,7 +70,7 @@ class HTMLStructureCleaner (sgmllib.SGMLParser): + else: + file = content + +- self.outputFile = StringIO.StringIO (u"") ++ self.outputFile = io.StringIO ("") + self.feed (file.read()) + self.close() + return self.outputFile.getvalue() +@@ -85,10 +85,10 @@ class HTMLStructureCleaner (sgmllib.SGMLParser): + self.outputFile.write (cgi.escape (data)) + + def handle_charref (self, ref): +- self.outputFile.write (u'&#%s;' % ref) ++ self.outputFile.write ('&#%s;' % ref) + + def handle_entityref (self, ref): +- self.outputFile.write (u'&%s;' % ref) ++ self.outputFile.write ('&%s;' % ref) + + + class FastStringOutput: +@@ -123,7 +123,7 @@ class TemplateCache: + inputEncoding is only used for HTML templates, and should be the encoding that the template + is stored in. + """ +- if (self.templateCache.has_key (name)): ++ if (name in self.templateCache): + template, oldctime = self.templateCache [name] + ctime = os.stat (name)[stat.ST_MTIME] + if (oldctime == ctime): +@@ -136,7 +136,7 @@ class TemplateCache: + def getXMLTemplate (self, name): + """ Name should be the path of an XML template file. 
+ """ +- if (self.templateCache.has_key (name)): ++ if (name in self.templateCache): + template, oldctime = self.templateCache [name] + ctime = os.stat (name)[stat.ST_MTIME] + if (oldctime == ctime): +@@ -164,7 +164,7 @@ class TemplateCache: + tempFile.close() + self.templateCache [name] = (template, os.stat (name)[stat.ST_MTIME]) + self.misses += 1 +- except Exception, e: ++ except Exception as e: + self.cacheLock.release() + raise e + +@@ -216,7 +216,7 @@ class MacroExpansionInterpreter (simpleTAL.TemplateInt + + def cmdOutputStartTag (self, command, args): + newAtts = [] +- for att, value in self.originalAttributes.items(): ++ for att, value in list(self.originalAttributes.items()): + if (self.macroArg is not None and att == "metal:define-macro"): + newAtts.append (("metal:use-macro",self.macroArg)) + elif (self.inMacro and att=="metal:define-slot"): +@@ -251,19 +251,19 @@ class MacroExpansionInterpreter (simpleTAL.TemplateInt + # End of the macro + self.inMacro = 0 + else: +- if (isinstance (resultVal, types.UnicodeType)): ++ if (isinstance (resultVal, str)): + self.file.write (resultVal) +- elif (isinstance (resultVal, types.StringType)): +- self.file.write (unicode (resultVal, 'ascii')) ++ elif (isinstance (resultVal, bytes)): ++ self.file.write (str (resultVal, 'ascii')) + else: +- self.file.write (unicode (str (resultVal), 'ascii')) ++ self.file.write (str (str (resultVal), 'ascii')) + else: +- if (isinstance (resultVal, types.UnicodeType)): ++ if (isinstance (resultVal, str)): + self.file.write (cgi.escape (resultVal)) +- elif (isinstance (resultVal, types.StringType)): +- self.file.write (cgi.escape (unicode (resultVal, 'ascii'))) ++ elif (isinstance (resultVal, bytes)): ++ self.file.write (cgi.escape (str (resultVal, 'ascii'))) + else: +- self.file.write (cgi.escape (unicode (str (resultVal), 'ascii'))) ++ self.file.write (cgi.escape (str (str (resultVal), 'ascii'))) + + if (self.outputTag and not args[1]): + self.file.write ('') +@@ -279,7 +279,7 @@ 
class MacroExpansionInterpreter (simpleTAL.TemplateInt + self.programCounter += 1 + + def ExpandMacros (context, template, outputEncoding="ISO-8859-1"): +- out = StringIO.StringIO() ++ out = io.StringIO() + interp = MacroExpansionInterpreter() + interp.initialise (context, out) + template.expand (context, out, outputEncoding=outputEncoding, interpreter=interp) diff --git a/devel/py-stsci.distutils/files/patch-2to3 b/devel/py-stsci.distutils/files/patch-2to3 new file mode 100644 index 00000000000..6f8f0536a9c --- /dev/null +++ b/devel/py-stsci.distutils/files/patch-2to3 @@ -0,0 +1,144 @@ +--- stsci/distutils/command/build_optional_ext.py.orig 2013-12-23 23:22:38 UTC ++++ stsci/distutils/command/build_optional_ext.py +@@ -3,7 +3,7 @@ from distutils.command.build_ext import build_ext + from distutils.errors import DistutilsError, CCompilerError, CompileError + from distutils.util import strtobool + +-from ConfigParser import ConfigParser ++from configparser import ConfigParser + + + class build_optional_ext(build_ext): +@@ -33,7 +33,7 @@ class build_optional_ext(build_ext): + cfg = ConfigParser() + try: + cfg.read('setup.cfg') +- except Exception, e: ++ except Exception as e: + log.warn('Failed to read setup.cfg: %s; proceeding as though ' + 'there are no optional extensions' % e) + return +@@ -75,7 +75,7 @@ class build_optional_ext(build_ext): + def build_extension(self, ext): + try: + build_ext.build_extension(self, ext) +- except (CCompilerError, DistutilsError, CompileError), e: ++ except (CCompilerError, DistutilsError, CompileError) as e: + if not hasattr(ext, '_optional') or not ext._optional: + raise + log.warn('building optional extension "%s" failed: %s' % +--- stsci/distutils/release.py.orig 2013-12-23 23:22:38 UTC ++++ stsci/distutils/release.py +@@ -6,7 +6,7 @@ import os + import shutil + import sys + +-from ConfigParser import ConfigParser ++from configparser import ConfigParser + + from setuptools.dist import Distribution + from zest.releaser.utils 
import ask +@@ -105,7 +105,7 @@ def add_to_stsci_package_index(data): + answer = '' + while not answer: + try: +- answer = raw_input(question).strip() ++ answer = input(question).strip() + if not answer: + if package_path: + # The user simple pressed enter, so use the supplied +@@ -114,13 +114,13 @@ def add_to_stsci_package_index(data): + else: + continue + if not os.path.exists(answer): +- print ('The supplied path %s does not exist. Please enter a ' +- 'different path or press Ctrl-C to cancel.' % answer) ++ print(('The supplied path %s does not exist. Please enter a ' ++ 'different path or press Ctrl-C to cancel.' % answer)) + if not os.access(answer, os.W_OK): +- print ('The supplied path %s is not writeable. Either change ' ++ print(('The supplied path %s is not writeable. Either change ' + 'the permissions of the directory or have someone ' + 'grant you access and try again, enter a different ' +- 'directory, or press Ctrl-C to cancel.' % answer) ++ 'directory, or press Ctrl-C to cancel.' % answer)) + package_path = answer + break + # The default was not supplied, so keep asking +@@ -141,15 +141,15 @@ def add_to_stsci_package_index(data): + '*.zip'))[0] + except IndexError: + try: +- print ( ++ print(( + "Could not find a source distribution in %s; did you " + "do a source checkout for upload? If possible, try " + "to cd to %s and manually create a source " + "distribution by running `python setup.py sdist`. " + "Then press enter to try again (or hit Ctrl-C to " + "cancel). Go ahead, I'll wait..." % +- (data['tagdir'], data['tagdir'])) +- raw_input() ++ (data['tagdir'], data['tagdir']))) ++ input() + except KeyboardInterrupt: + return + +@@ -166,12 +166,12 @@ def add_to_stsci_package_index(data): + dist.fetch_build_eggs(['basketweaver']) + except: + # There are so many things that could possibly go wrong here... 
+- print ('Failed to get basketweaver, which is required to rebuild ' ++ print(('Failed to get basketweaver, which is required to rebuild ' + 'the package index. To manually complete the release, ' + 'install basketweaver manually, then copy %s into %s, cd ' + 'to %s, and then run `makeindex *`, where makeindex is the ' + 'command installed by basketweaver.' % +- (sdist_file, package_path, package_path)) ++ (sdist_file, package_path, package_path))) + import basketweaver.makeindex + + # Now we should have everything we need... +@@ -183,4 +183,4 @@ def add_to_stsci_package_index(data): + finally: + os.chdir(old_cwd) + +- print 'Finished adding package to %s.' % PACKAGE_INDEX_URL ++ print('Finished adding package to %s.' % PACKAGE_INDEX_URL) +--- stsci/distutils/tests/__init__.py.orig 2013-12-23 23:22:38 UTC ++++ stsci/distutils/tests/__init__.py +@@ -7,6 +7,7 @@ import tempfile + import nose + + from .util import reload, rmtree ++import importlib + + + TESTPACKAGE_URL = ('https://svn.stsci.edu/svn/ssb/stsci_python/' +@@ -24,7 +25,7 @@ class StsciDistutilsTestCase(object): + 'checkout', TESTPACKAGE_URL, cls.wc_dir], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) +- except OSError, e: ++ except OSError as e: + raise nose.SkipTest('svn unavailable to checkout out test ' + 'package: %s' % e) + +@@ -48,7 +49,7 @@ class StsciDistutilsTestCase(object): + # package's __path__ since it's already been imported. 
+ if 'stsci' in sys.modules: + # Clean the existing __path__ up +- reload(sys.modules['stsci']) ++ importlib.reload(sys.modules['stsci']) + sys.modules['stsci'].__path__.insert( + 0, os.path.join(self.package_dir, 'stsci')) + +--- stsci/distutils/tests/testpackage/setup.py.orig 2013-12-23 23:22:38 UTC ++++ stsci/distutils/tests/testpackage/setup.py +@@ -12,6 +12,5 @@ setup( + namespace_packages=['stsci'], packages=['stsci'], + dependency_links=['http://stsdas.stsci.edu/download/packages'], + d2to1=True, +- use_2to3=True, + zip_safe=False, + ) diff --git a/devel/py-testoob/Makefile b/devel/py-testoob/Makefile index b0783cdc523..0f181e61f91 100644 --- a/devel/py-testoob/Makefile +++ b/devel/py-testoob/Makefile @@ -18,7 +18,7 @@ OPTIONS_DEFINE= TWISTED TWISTED_DESC= enable running in threads NO_ARCH= yes -USES= python:3.6+ tar:bzip2 +USES= dos2unix python:3.6+ tar:bzip2 USE_PYTHON= autoplist distutils TWISTED_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}twisted>=0:devel/py-twisted@${PY_FLAVOR} diff --git a/devel/py-testoob/files/patch-2to3 b/devel/py-testoob/files/patch-2to3 new file mode 100644 index 00000000000..3efb8ad3035 --- /dev/null +++ b/devel/py-testoob/files/patch-2to3 @@ -0,0 +1,1689 @@ +--- src/testoob/asserter.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/asserter.py +@@ -32,7 +32,7 @@ class Asserter: + # Prevent recursion (accures in testoob tests, when ran with testoob :-) ). + if getattr(Class, method_name).__name__ == "_assert_reporting_func": + return +- variables = eval("Class.%s" % method_name).func_code.co_varnames ++ variables = eval("Class.%s" % method_name).__code__.co_varnames + setattr(Class, "_real_function_%s" % method_name, eval("Class.%s" % method_name)) + method = eval("Class._real_function_%s" % method_name) + def _assert_reporting_func(*args, **kwargs): +@@ -45,7 +45,7 @@ class Asserter: + num_free_args -= 1 + additional_args = args[num_free_args:] + additional_args + # Can't be a dictionary, because the order matters. 
+- varList = zip(variables[1:], (args[1:num_free_args] + additional_args)) ++ varList = list(zip(variables[1:], (args[1:num_free_args] + additional_args))) + # Here is some evil did to find the function which called me. + test = sys._getframe().f_back.f_locals["self"] + # If we run something that has no reporter, it should just run +@@ -54,7 +54,7 @@ class Asserter: + return method(*args, **kwargs) + try: + method(*args, **kwargs) +- except Exception, e: ++ except Exception as e: + self._reporters[test].addAssert(test, method_name, varList, e) + raise + self._reporters[test].addAssert(test, method_name, varList, None) +--- src/testoob/commandline/__init__.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/commandline/__init__.py +@@ -17,8 +17,8 @@ def module_list(): + + def load_options(): + for module in module_list(): +- exec "import %s" % module ++ exec("import %s" % module) + + load_options() + +-import parsing ++from . import parsing +--- src/testoob/compatibility/itertools.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/compatibility/itertools.py +@@ -20,8 +20,8 @@ takewhile(pred, seq) --> seq[0], seq[1], until pred fa + dropwhile(pred, seq) --> seq[n], seq[n+1], starting when pred fails + groupby(iterable[, keyfunc]) --> sub-iterators grouped by value of keyfunc(v) + """ +-from __future__ import generators + ++ + __all__ = ['chain', 'count', 'cycle', 'dropwhile', 'groupby', 'ifilter', + 'ifilterfalse', 'imap', 'islice', 'izip', 'repeat', 'starmap', + 'takewhile', 'tee'] +@@ -48,14 +48,14 @@ class chain: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + try: +- next_elt = self._cur_iterable_iter.next() ++ next_elt = next(self._cur_iterable_iter) + except StopIteration: + # The current list's iterator is exhausted, switch to next one +- self._cur_iterable_iter = iter(self._iterables_iter.next()) ++ self._cur_iterable_iter = iter(next(self._iterables_iter)) + try: +- next_elt = self._cur_iterable_iter.next() ++ next_elt = 
next(self._cur_iterable_iter) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % \ +@@ -92,7 +92,7 @@ class count: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + self.times += 1 + return self.times + +@@ -125,15 +125,15 @@ class cycle: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + # XXX Could probably be improved + try: +- next_elt = self._cur_iter.next() ++ next_elt = next(self._cur_iter) + if self._must_save: + self._saved.append(next_elt) + except StopIteration: + self._cur_iter = iter(self._saved) +- next_elt = self._cur_iter.next() ++ next_elt = next(self._cur_iter) + self._must_save = False + except AttributeError: + # CPython raises a TypeError when next() is not defined +@@ -167,9 +167,9 @@ class dropwhile: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + try: +- value = self._iter.next() ++ value = next(self._iter) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % \ +@@ -177,7 +177,7 @@ class dropwhile: + if self._dropped: + return value + while self._predicate(value): +- value = self._iter.next() ++ value = next(self._iter) + self._dropped = True + return value + +@@ -205,15 +205,15 @@ class groupby: + key = lambda x: x + self.keyfunc = key + self.it = iter(iterable) +- self.tgtkey = self.currkey = self.currvalue = xrange(0) ++ self.tgtkey = self.currkey = self.currvalue = range(0) + + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + while self.currkey == self.tgtkey: + try: +- self.currvalue = self.it.next() # Exit on StopIteration ++ self.currvalue = next(self.it) # Exit on StopIteration + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % \ +@@ -225,7 +225,7 @@ class groupby: + def 
_grouper(self, tgtkey): + while self.currkey == tgtkey: + yield self.currvalue +- self.currvalue = self.it.next() # Exit on StopIteration ++ self.currvalue = next(self.it) # Exit on StopIteration + self.currkey = self.keyfunc(self.currvalue) + + +@@ -257,9 +257,9 @@ class ifilter(_ifilter_base): + if predicate(x): + yield x + """ +- def next(self): ++ def __next__(self): + try: +- next_elt = self._iter.next() ++ next_elt = next(self._iter) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % \ +@@ -267,7 +267,7 @@ class ifilter(_ifilter_base): + while True: + if self._predicate(next_elt): + return next_elt +- next_elt = self._iter.next() ++ next_elt = next(self._iter) + + class ifilterfalse(_ifilter_base): + """Make an iterator that filters elements from iterable returning +@@ -283,9 +283,9 @@ class ifilterfalse(_ifilter_base): + if not predicate(x): + yield x + """ +- def next(self): ++ def __next__(self): + try: +- next_elt = self._iter.next() ++ next_elt = next(self._iter) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % \ +@@ -293,7 +293,7 @@ class ifilterfalse(_ifilter_base): + while True: + if not self._predicate(next_elt): + return next_elt +- next_elt = self._iter.next() ++ next_elt = next(self._iter) + + + +@@ -322,14 +322,14 @@ class imap: + """ + def __init__(self, function, iterable, *other_iterables): + self._func = function +- self._iters = map(iter, (iterable, ) + other_iterables) ++ self._iters = list(map(iter, (iterable, ) + other_iterables)) + + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + try: +- args = [it.next() for it in self._iters] ++ args = [next(it) for it in self._iters] + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % \ +@@ -357,15 +357,15 @@ class islice: + def 
__init__(self, iterable, *args): + s = slice(*args) + self.start, self.stop, self.step = s.start or 0, s.stop, s.step +- if not isinstance(self.start, (int, long)): ++ if not isinstance(self.start, int): + raise ValueError("Start argument must be an integer") +- if self.stop is not None and not isinstance(self.stop, (int,long)): ++ if self.stop is not None and not isinstance(self.stop, int): + raise ValueError("Stop argument must be an integer or None") + if self.step is None: + self.step = 1 + if self.start<0 or (self.stop is not None and self.stop<0 + ) or self.step<=0: +- raise ValueError, "indices for islice() must be positive" ++ raise ValueError("indices for islice() must be positive") + self.it = iter(iterable) + self.donext = None + self.cnt = 0 +@@ -373,10 +373,10 @@ class islice: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + if self.donext is None: + try: +- self.donext = self.it.next ++ self.donext = self.it.__next__ + except AttributeError: + raise TypeError + while self.cnt < self.start: +@@ -403,17 +403,17 @@ class izip: + yield tuple(result) + """ + def __init__(self, *iterables): +- self._iterators = map(iter, iterables) ++ self._iterators = list(map(iter, iterables)) + self._result = [None] * len(self._iterators) + + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + if not self._iterators: + raise StopIteration() + try: +- return tuple([i.next() for i in self._iterators]) ++ return tuple([next(i) for i in self._iterators]) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % (i)) +@@ -439,7 +439,7 @@ class repeat: + def __init__(self, obj, times=None): + self._obj = obj + if times is not None: +- xrange(times) # Raise a TypeError ++ range(times) # Raise a TypeError + if times < 0: + times = 0 + self._times = times +@@ -447,7 +447,7 @@ class repeat: + def __iter__(self): + return self + +- def next(self): ++ 
def __next__(self): + # next() *need* to decrement self._times when consumed + if self._times is not None: + if self._times <= 0: +@@ -489,10 +489,10 @@ class starmap: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + # CPython raises a TypeError when the iterator doesn't return a tuple + try: +- t = self._iter.next() ++ t = next(self._iter) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % self._iter) +@@ -522,9 +522,9 @@ class takewhile: + def __iter__(self): + return self + +- def next(self): ++ def __next__(self): + try: +- value = self._iter.next() ++ value = next(self._iter) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % \ +@@ -544,7 +544,7 @@ class TeeData(object): + # iterates until 'i' if not done yet + while i>= len(self.data): + try: +- self.data.append( self._iter.next() ) ++ self.data.append( next(self._iter) ) + except AttributeError: + # CPython raises a TypeError when next() is not defined + raise TypeError('%s has no next() method' % self._iter) +@@ -565,7 +565,7 @@ class TeeObject(object): + self.tee_data = TeeData(iter(iterable)) + self.pos = 0 + +- def next(self): ++ def __next__(self): + data = self.tee_data[self.pos] + self.pos += 1 + return data +@@ -603,6 +603,6 @@ def tee(iterable, n=2): + if isinstance(iterable, TeeObject): + # a,b = tee(range(10)) ; c,d = tee(a) ; self.assert_(a is c) + return tuple([iterable] + +- [TeeObject(tee_data=iterable.tee_data) for i in xrange(n-1)]) ++ [TeeObject(tee_data=iterable.tee_data) for i in range(n-1)]) + tee_data = TeeData(iter(iterable)) +- return tuple([TeeObject(tee_data=tee_data) for i in xrange(n)]) ++ return tuple([TeeObject(tee_data=tee_data) for i in range(n)]) +--- src/testoob/compatibility/optparse.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/compatibility/optparse.py +@@ -70,7 +70,7 @@ SOFTWARE, 
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH D + + import sys, os + import types +-import textwrap ++from . import textwrap + + class OptParseError (Exception): + def __init__ (self, msg): +@@ -161,10 +161,10 @@ class HelpFormatter: + self.level -= 1 + + def format_usage (self, usage): +- raise NotImplementedError, "subclasses must implement" ++ raise NotImplementedError("subclasses must implement") + + def format_heading (self, heading): +- raise NotImplementedError, "subclasses must implement" ++ raise NotImplementedError("subclasses must implement") + + def format_description (self, description): + desc_width = self.width - self.current_indent +@@ -280,7 +280,7 @@ class TitledHelpFormatter (HelpFormatter): + + + _builtin_cvt = { "int" : (int, "integer"), +- "long" : (long, "long integer"), ++ "long" : (int, "long integer"), + "float" : (float, "floating-point"), + "complex" : (complex, "complex") } + +@@ -434,7 +434,7 @@ class Option: + # Filter out None because early versions of Optik had exactly + # one short option and one long option, either of which + # could be None. 
+- opts = filter(None, opts) ++ opts = [_f for _f in opts if _f] + if not opts: + raise TypeError("at least one option string must be supplied") + return opts +@@ -462,7 +462,7 @@ class Option: + + def _set_attrs (self, attrs): + for attr in self.ATTRS: +- if attrs.has_key(attr): ++ if attr in attrs: + setattr(self, attr, attrs[attr]) + del attrs[attr] + else: +@@ -472,7 +472,7 @@ class Option: + setattr(self, attr, None) + if attrs: + raise OptionError( +- "invalid keyword arguments: %s" % ", ".join(attrs.keys()), ++ "invalid keyword arguments: %s" % ", ".join(list(attrs.keys())), + self) + + +@@ -507,7 +507,7 @@ class Option: + if self.choices is None: + raise OptionError( + "must supply a list of choices for type 'choice'", self) +- elif type(self.choices) not in (types.TupleType, types.ListType): ++ elif type(self.choices) not in (tuple, list): + raise OptionError( + "choices must be a list of strings ('%s' supplied)" + % str(type(self.choices)).split("'")[1], self) +@@ -547,12 +547,12 @@ class Option: + raise OptionError( + "callback not callable: %r" % self.callback, self) + if (self.callback_args is not None and +- type(self.callback_args) is not types.TupleType): ++ type(self.callback_args) is not tuple): + raise OptionError( + "callback_args, if supplied, must be a tuple: not %r" + % self.callback_args, self) + if (self.callback_kwargs is not None and +- type(self.callback_kwargs) is not types.DictType): ++ type(self.callback_kwargs) is not dict): + raise OptionError( + "callback_kwargs, if supplied, must be a dict: not %r" + % self.callback_kwargs, self) +@@ -636,7 +636,7 @@ class Option: + parser.print_version() + sys.exit(0) + else: +- raise RuntimeError, "unknown action %r" % self.action ++ raise RuntimeError("unknown action %r" % self.action) + + return 1 + +@@ -662,7 +662,7 @@ class Values: + + def __init__ (self, defaults=None): + if defaults: +- for (attr, val) in defaults.items(): ++ for (attr, val) in list(defaults.items()): + setattr(self, attr, 
val) + + def __repr__ (self): +@@ -677,7 +677,7 @@ class Values: + are silently ignored. + """ + for attr in dir(self): +- if dict.has_key(attr): ++ if attr in dict: + dval = dict[attr] + if dval is not None: + setattr(self, attr, dval) +@@ -696,7 +696,7 @@ class Values: + elif mode == "loose": + self._update_loose(dict) + else: +- raise ValueError, "invalid update mode: %r" % mode ++ raise ValueError("invalid update mode: %r" % mode) + + def read_module (self, modname, mode="careful"): + __import__(modname) +@@ -705,7 +705,7 @@ class Values: + + def read_file (self, filename, mode="careful"): + vars = {} +- execfile(filename, vars) ++ exec(compile(open(filename, "rb").read(), filename, 'exec'), vars) + self._update(vars, mode) + + def ensure_value (self, attr, value): +@@ -775,7 +775,7 @@ class OptionContainer: + + def set_conflict_handler (self, handler): + if handler not in ("ignore", "error", "resolve"): +- raise ValueError, "invalid conflict_resolution value %r" % handler ++ raise ValueError("invalid conflict_resolution value %r" % handler) + self.conflict_handler = handler + + def set_description (self, description): +@@ -787,10 +787,10 @@ class OptionContainer: + def _check_conflict (self, option): + conflict_opts = [] + for opt in option._short_opts: +- if self._short_opt.has_key(opt): ++ if opt in self._short_opt: + conflict_opts.append((opt, self._short_opt[opt])) + for opt in option._long_opts: +- if self._long_opt.has_key(opt): ++ if opt in self._long_opt: + conflict_opts.append((opt, self._long_opt[opt])) + + if conflict_opts: +@@ -817,14 +817,14 @@ class OptionContainer: + """add_option(Option) + add_option(opt_str, ..., kwarg=val, ...) 
+ """ +- if type(args[0]) is types.StringType: ++ if type(args[0]) is bytes: + option = self.option_class(*args, **kwargs) + elif len(args) == 1 and not kwargs: + option = args[0] + if not isinstance(option, Option): +- raise TypeError, "not an Option instance: %r" % option ++ raise TypeError("not an Option instance: %r" % option) + else: +- raise TypeError, "invalid arguments" ++ raise TypeError("invalid arguments") + + self._check_conflict(option) + +@@ -838,7 +838,7 @@ class OptionContainer: + if option.dest is not None: # option has a dest, we need a default + if option.default is not NO_DEFAULT: + self.defaults[option.dest] = option.default +- elif not self.defaults.has_key(option.dest): ++ elif option.dest not in self.defaults: + self.defaults[option.dest] = None + + return option +@@ -854,8 +854,8 @@ class OptionContainer: + self._long_opt.get(opt_str)) + + def has_option (self, opt_str): +- return (self._short_opt.has_key(opt_str) or +- self._long_opt.has_key(opt_str)) ++ return (opt_str in self._short_opt or ++ opt_str in self._long_opt) + + def remove_option (self, opt_str): + option = self._short_opt.get(opt_str) +@@ -1065,16 +1065,16 @@ class OptionParser (OptionContainer): + + def add_option_group (self, *args, **kwargs): + # XXX lots of overlap with OptionContainer.add_option() +- if type(args[0]) is types.StringType: ++ if type(args[0]) is bytes: + group = OptionGroup(self, *args, **kwargs) + elif len(args) == 1 and not kwargs: + group = args[0] + if not isinstance(group, OptionGroup): +- raise TypeError, "not an OptionGroup instance: %r" % group ++ raise TypeError("not an OptionGroup instance: %r" % group) + if group.parser is not self: +- raise ValueError, "invalid OptionGroup (wrong parser)" ++ raise ValueError("invalid OptionGroup (wrong parser)") + else: +- raise TypeError, "invalid arguments" ++ raise TypeError("invalid arguments") + + self.option_groups.append(group) + return group +@@ -1128,7 +1128,7 @@ class OptionParser (OptionContainer): + 
+ try: + stop = self._process_args(largs, rargs, values) +- except (BadOptionError, OptionValueError), err: ++ except (BadOptionError, OptionValueError) as err: + self.error(err.msg) + + args = largs + rargs +@@ -1313,7 +1313,7 @@ class OptionParser (OptionContainer): + or not defined. + """ + if self.usage: +- print >>file, self.get_usage() ++ print(self.get_usage(), file=file) + + def get_version (self): + if self.version: +@@ -1330,7 +1330,7 @@ class OptionParser (OptionContainer): + name. Does nothing if self.version is empty or undefined. + """ + if self.version: +- print >>file, self.get_version() ++ print(self.get_version(), file=file) + + def format_option_help (self, formatter=None): + if formatter is None: +@@ -1381,11 +1381,11 @@ def _match_abbrev (s, wordmap): + 'words', raise BadOptionError. + """ + # Is there an exact match? +- if wordmap.has_key(s): ++ if s in wordmap: + return s + else: + # Isolate all words with s as a prefix. +- possibilities = [word for word in wordmap.keys() ++ possibilities = [word for word in list(wordmap.keys()) + if word.startswith(s)] + # No exact match, so there had better be just one possibility. + if len(possibilities) == 1: +--- src/testoob/compatibility/sets.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/compatibility/sets.py +@@ -54,9 +54,9 @@ what's tested is actually `z in y'. + # - Raymond Hettinger added a number of speedups and other + # improvements. 
+ +-from __future__ import generators ++ + try: +- from itertools import ifilter, ifilterfalse ++ from .itertools import ifilter, ifilterfalse + except ImportError: + # Code to make the module run under Py2.2 + def ifilter(predicate, iterable): +@@ -73,10 +73,6 @@ except ImportError: + for x in iterable: + if not predicate(x): + yield x +- try: +- True, False +- except NameError: +- True, False = (0==0, 0!=0) + + __all__ = ['BaseSet', 'Set', 'ImmutableSet'] + +@@ -91,7 +87,7 @@ class BaseSet(object): + """This is an abstract class.""" + # Don't call this from a concrete subclass! + if self.__class__ is BaseSet: +- raise TypeError, ("BaseSet is an abstract class. " ++ raise TypeError("BaseSet is an abstract class. " + "Use Set or ImmutableSet.") + + # Standard protocols: __len__, __repr__, __str__, __iter__ +@@ -111,7 +107,7 @@ class BaseSet(object): + __str__ = __repr__ + + def _repr(self, sorted=False): +- elements = self._data.keys() ++ elements = list(self._data.keys()) + if sorted: + elements.sort() + return '%s(%r)' % (self.__class__.__name__, elements) +@@ -121,7 +117,7 @@ class BaseSet(object): + + This is the keys iterator for the underlying dict. + """ +- return self._data.iterkeys() ++ return iter(self._data.keys()) + + # Three-way comparison is not supported. However, because __eq__ is + # tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and +@@ -129,7 +125,7 @@ class BaseSet(object): + # case). + + def __cmp__(self, other): +- raise TypeError, "can't compare sets using cmp()" ++ raise TypeError("can't compare sets using cmp()") + + # Equality comparisons using the underlying dicts. 
Mixed-type comparisons + # are allowed here, where Set == z for non-Set z always returns False, +@@ -231,7 +227,7 @@ class BaseSet(object): + little, big = self, other + else: + little, big = other, self +- common = ifilter(big._data.has_key, little) ++ common = filter(big._data.has_key, little) + return self.__class__(common) + + def __xor__(self, other): +@@ -256,9 +252,9 @@ class BaseSet(object): + otherdata = other._data + except AttributeError: + otherdata = Set(other)._data +- for elt in ifilterfalse(otherdata.has_key, selfdata): ++ for elt in filterfalse(otherdata.has_key, selfdata): + data[elt] = value +- for elt in ifilterfalse(selfdata.has_key, otherdata): ++ for elt in filterfalse(selfdata.has_key, otherdata): + data[elt] = value + return result + +@@ -283,7 +279,7 @@ class BaseSet(object): + except AttributeError: + otherdata = Set(other)._data + value = True +- for elt in ifilterfalse(otherdata.has_key, self): ++ for elt in filterfalse(otherdata.has_key, self): + data[elt] = value + return result + +@@ -309,7 +305,7 @@ class BaseSet(object): + self._binary_sanity_check(other) + if len(self) > len(other): # Fast check for obvious cases + return False +- for elt in ifilterfalse(other._data.has_key, self): ++ for elt in filterfalse(other._data.has_key, self): + return False + return True + +@@ -318,7 +314,7 @@ class BaseSet(object): + self._binary_sanity_check(other) + if len(self) < len(other): # Fast check for obvious cases + return False +- for elt in ifilterfalse(self._data.has_key, other): ++ for elt in filterfalse(self._data.has_key, other): + return False + return True + +@@ -340,7 +336,7 @@ class BaseSet(object): + # Check that the other argument to a binary operation is also + # a set, raising a TypeError otherwise. 
+ if not isinstance(other, BaseSet): +- raise TypeError, "Binary operation only permitted between sets" ++ raise TypeError("Binary operation only permitted between sets") + + def _compute_hash(self): + # Calculate hash code for a set by xor'ing the hash codes of +@@ -438,7 +434,7 @@ class Set(BaseSet): + def __hash__(self): + """A Set cannot be hashed.""" + # We inherit object.__hash__, so we must deny this explicitly +- raise TypeError, "Can't hash a Set, only an ImmutableSet." ++ raise TypeError("Can't hash a Set, only an ImmutableSet.") + + # In-place union, intersection, differences. + # Subtle: The xyz_update() functions deliberately return None, +@@ -501,7 +497,7 @@ class Set(BaseSet): + other = Set(other) + if self is other: + self.clear() +- for elt in ifilter(data.has_key, other): ++ for elt in filter(data.has_key, other): + del data[elt] + + # Python dict-like mass mutations: update, clear +--- src/testoob/compatibility/subprocess.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/compatibility/subprocess.py +@@ -403,13 +403,6 @@ try: + except: + MAXFD = 256 + +-# True/False does not exist on 2.2.0 +-try: +- False +-except NameError: +- False = 0 +- True = 1 +- + _active = [] + + def _cleanup(): +@@ -600,7 +593,7 @@ class Popen(object): + # Detach and turn into fd + p2cwrite = p2cwrite.Detach() + p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) +- elif type(stdin) == types.IntType: ++ elif type(stdin) == int: + p2cread = msvcrt.get_osfhandle(stdin) + else: + # Assuming file-like object +@@ -614,7 +607,7 @@ class Popen(object): + # Detach and turn into fd + c2pread = c2pread.Detach() + c2pread = msvcrt.open_osfhandle(c2pread, 0) +- elif type(stdout) == types.IntType: ++ elif type(stdout) == int: + c2pwrite = msvcrt.get_osfhandle(stdout) + else: + # Assuming file-like object +@@ -630,7 +623,7 @@ class Popen(object): + errread = msvcrt.open_osfhandle(errread, 0) + elif stderr == STDOUT: + errwrite = c2pwrite +- elif type(stderr) == types.IntType: ++ elif 
type(stderr) == int: + errwrite = msvcrt.get_osfhandle(stderr) + else: + # Assuming file-like object +@@ -673,13 +666,13 @@ class Popen(object): + errread, errwrite): + """Execute program (MS Windows version)""" + +- if not isinstance(args, types.StringTypes): ++ if not isinstance(args, (str,)): + args = list2cmdline(args) + + if shell: + comspec = os.environ.get("COMSPEC", "cmd.exe") + args = comspec + " /c " + args +- if (GetVersion() >= 0x80000000L or ++ if (GetVersion() >= 0x80000000 or + os.path.basename(comspec).lower() == "command.com"): + # Win9x, or using command.com on NT. We need to + # use the w9xpopen intermediate program. For more +@@ -716,7 +709,7 @@ class Popen(object): + env, + cwd, + startupinfo) +- except pywintypes.error, e: ++ except pywintypes.error as e: + # Translate pywintypes.error to WindowsError, which is + # a subclass of OSError. FIXME: We should really + # translate errno using _sys_errlist (or simliar), but +@@ -835,7 +828,7 @@ class Popen(object): + pass + elif stdin == PIPE: + p2cread, p2cwrite = os.pipe() +- elif type(stdin) == types.IntType: ++ elif type(stdin) == int: + p2cread = stdin + else: + # Assuming file-like object +@@ -845,7 +838,7 @@ class Popen(object): + pass + elif stdout == PIPE: + c2pread, c2pwrite = os.pipe() +- elif type(stdout) == types.IntType: ++ elif type(stdout) == int: + c2pwrite = stdout + else: + # Assuming file-like object +@@ -857,7 +850,7 @@ class Popen(object): + errread, errwrite = os.pipe() + elif stderr == STDOUT: + errwrite = c2pwrite +- elif type(stderr) == types.IntType: ++ elif type(stderr) == int: + errwrite = stderr + else: + # Assuming file-like object +@@ -896,7 +889,7 @@ class Popen(object): + errread, errwrite): + """Execute program (POSIX version)""" + +- if isinstance(args, types.StringTypes): ++ if isinstance(args, (str,)): + args = [args] + + if shell: +@@ -1100,8 +1093,8 @@ def _demo_posix(): + # Example 1: Simple redirection: Get process list + # + plist = Popen(["ps"], 
stdout=PIPE).communicate()[0] +- print "Process list:" +- print plist ++ print("Process list:") ++ print(plist) + + # + # Example 2: Change uid before executing child +@@ -1113,42 +1106,42 @@ def _demo_posix(): + # + # Example 3: Connecting several subprocesses + # +- print "Looking for 'hda'..." ++ print("Looking for 'hda'...") + p1 = Popen(["dmesg"], stdout=PIPE) + p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) +- print repr(p2.communicate()[0]) ++ print(repr(p2.communicate()[0])) + + # + # Example 4: Catch execution error + # +- print +- print "Trying a weird file..." ++ print() ++ print("Trying a weird file...") + try: +- print Popen(["/this/path/does/not/exist"]).communicate() +- except OSError, e: ++ print(Popen(["/this/path/does/not/exist"]).communicate()) ++ except OSError as e: + if e.errno == errno.ENOENT: +- print "The file didn't exist. I thought so..." +- print "Child traceback:" +- print e.child_traceback ++ print("The file didn't exist. I thought so...") ++ print("Child traceback:") ++ print(e.child_traceback) + else: +- print "Error", e.errno ++ print("Error", e.errno) + else: +- print >>sys.stderr, "Gosh. No error." ++ print("Gosh. No error.", file=sys.stderr) + + + def _demo_windows(): + # + # Example 1: Connecting several subprocesses + # +- print "Looking for 'PROMPT' in set output..." ++ print("Looking for 'PROMPT' in set output...") + p1 = Popen("set", stdout=PIPE, shell=True) + p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) +- print repr(p2.communicate()[0]) ++ print(repr(p2.communicate()[0])) + + # + # Example 2: Simple execution of program + # +- print "Executing calc..." 
++ print("Executing calc...") + p = Popen("calc") + p.wait() + +--- src/testoob/compatibility/textwrap.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/compatibility/textwrap.py +@@ -10,14 +10,6 @@ __revision__ = "$Id: textwrap.py,v 1.32.8.2 2004/05/13 + + import string, re + +-# Do the right thing with boolean values for all known Python versions +-# (so this module can be copied to projects that don't depend on Python +-# 2.3, e.g. Optik and Docutils). +-try: +- True, False +-except NameError: +- (True, False) = (1, 0) +- + __all__ = ['TextWrapper', 'wrap', 'fill'] + + # Hardcode the recognized whitespace characters to the US-ASCII +@@ -69,7 +61,7 @@ class TextWrapper: + whitespace_trans = string.maketrans(_whitespace, ' ' * len(_whitespace)) + + unicode_whitespace_trans = {} +- uspace = ord(u' ') ++ uspace = ord(' ') + for x in map(ord, _whitespace): + unicode_whitespace_trans[x] = uspace + +@@ -123,7 +115,7 @@ class TextWrapper: + if self.replace_whitespace: + if isinstance(text, str): + text = text.translate(self.whitespace_trans) +- elif isinstance(text, unicode): ++ elif isinstance(text, str): + text = text.translate(self.unicode_whitespace_trans) + return text + +@@ -140,7 +132,7 @@ class TextWrapper: + 'use', ' ', 'the', ' ', '-b', ' ', 'option!' 
+ """ + chunks = self.wordsep_re.split(text) +- chunks = filter(None, chunks) ++ chunks = [_f for _f in chunks if _f] + return chunks + + def _fix_sentence_endings(self, chunks): +--- src/testoob/compatibility/trace.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/compatibility/trace.py +@@ -59,7 +59,7 @@ import types + import gc + + try: +- import cPickle ++ import pickle + pickle = cPickle + except ImportError: + import pickle +@@ -116,11 +116,11 @@ class Ignore: + self._mods = modules or [] + self._dirs = dirs or [] + +- self._dirs = map(os.path.normpath, self._dirs) ++ self._dirs = list(map(os.path.normpath, self._dirs)) + self._ignore = { '': 1 } + + def names(self, filename, modulename): +- if self._ignore.has_key(modulename): ++ if modulename in self._ignore: + return self._ignore[modulename] + + # haven't seen this one before, so see if the module name is +@@ -218,9 +218,9 @@ class CoverageResults: + counts, calledfuncs, callers = \ + pickle.load(open(self.infile, 'rb')) + self.update(self.__class__(counts, calledfuncs, callers)) +- except (IOError, EOFError, ValueError), err: +- print >> sys.stderr, ("Skipping counts file %r: %s" +- % (self.infile, err)) ++ except (IOError, EOFError, ValueError) as err: ++ print(("Skipping counts file %r: %s" ++ % (self.infile, err)), file=sys.stderr) + + def update(self, other): + """Merge in the data from another CoverageResults""" +@@ -231,13 +231,13 @@ class CoverageResults: + other_calledfuncs = other.calledfuncs + other_callers = other.callers + +- for key in other_counts.keys(): ++ for key in list(other_counts.keys()): + counts[key] = counts.get(key, 0) + other_counts[key] + +- for key in other_calledfuncs.keys(): ++ for key in list(other_calledfuncs.keys()): + calledfuncs[key] = 1 + +- for key in other_callers.keys(): ++ for key in list(other_callers.keys()): + callers[key] = 1 + + def write_results(self, show_missing=True, summary=False, coverdir=None): +@@ -245,42 +245,42 @@ class CoverageResults: + @param 
coverdir + """ + if self.calledfuncs: +- print +- print "functions called:" +- calls = self.calledfuncs.keys() ++ print() ++ print("functions called:") ++ calls = list(self.calledfuncs.keys()) + calls.sort() + for filename, modulename, funcname in calls: +- print ("filename: %s, modulename: %s, funcname: %s" +- % (filename, modulename, funcname)) ++ print(("filename: %s, modulename: %s, funcname: %s" ++ % (filename, modulename, funcname))) + + if self.callers: +- print +- print "calling relationships:" +- calls = self.callers.keys() ++ print() ++ print("calling relationships:") ++ calls = list(self.callers.keys()) + calls.sort() + lastfile = lastcfile = "" + for ((pfile, pmod, pfunc), (cfile, cmod, cfunc)) in calls: + if pfile != lastfile: +- print +- print "***", pfile, "***" ++ print() ++ print("***", pfile, "***") + lastfile = pfile + lastcfile = "" + if cfile != pfile and lastcfile != cfile: +- print " -->", cfile ++ print(" -->", cfile) + lastcfile = cfile +- print " %s.%s -> %s.%s" % (pmod, pfunc, cmod, cfunc) ++ print(" %s.%s -> %s.%s" % (pmod, pfunc, cmod, cfunc)) + + # turn the counts data ("(filename, lineno) = count") into something + # accessible on a per-file basis + per_file = {} +- for filename, lineno in self.counts.keys(): ++ for filename, lineno in list(self.counts.keys()): + lines_hit = per_file[filename] = per_file.get(filename, {}) + lines_hit[lineno] = self.counts[(filename, lineno)] + + # accumulate summary info, if needed + sums = {} + +- for filename, count in per_file.iteritems(): ++ for filename, count in per_file.items(): + # skip some "files" we don't care about... 
+ if filename == "": + continue +@@ -314,29 +314,29 @@ class CoverageResults: + sums[modulename] = n_lines, percent, modulename, filename + + if summary and sums: +- mods = sums.keys() ++ mods = list(sums.keys()) + mods.sort() +- print "lines cov% module (path)" ++ print("lines cov% module (path)") + for m in mods: + n_lines, percent, modulename, filename = sums[m] +- print "%5d %3d%% %s (%s)" % sums[m] ++ print("%5d %3d%% %s (%s)" % sums[m]) + + if self.outfile: + # try and store counts and module info into self.outfile + try: + pickle.dump((self.counts, self.calledfuncs, self.callers), + open(self.outfile, 'wb'), 1) +- except IOError, err: +- print >> sys.stderr, "Can't save counts files because %s" % err ++ except IOError as err: ++ print("Can't save counts files because %s" % err, file=sys.stderr) + + def write_results_file(self, path, lines, lnotab, lines_hit): + """Return a coverage results file in path.""" + + try: + outfile = open(path, "w") +- except IOError, err: +- print >> sys.stderr, ("trace: Could not open %r for writing: %s" +- "- skipping" % (path, err)) ++ except IOError as err: ++ print(("trace: Could not open %r for writing: %s" ++ "- skipping" % (path, err)), file=sys.stderr) + return 0, 0 + + n_lines = 0 +@@ -371,7 +371,7 @@ def find_lines_from_code(code, strs): + + #line_increments = [ord(c) for c in code.co_lnotab[1::2]] + # XXX Replaced above line with Python 2.2-compatible line (orip) +- def odd_indexed_items(seq): return [seq[i] for i in xrange(1, len(seq), 2)] ++ def odd_indexed_items(seq): return [seq[i] for i in range(1, len(seq), 2)] + line_increments = [ord(c) for c in odd_indexed_items(code.co_lnotab)] + + table_length = len(line_increments) +@@ -424,9 +424,9 @@ def find_executable_linenos(filename): + """Return dict where keys are line numbers in the line number table.""" + try: + prog = open(filename, "rU").read() +- except IOError, err: +- print >> sys.stderr, ("Not printing coverage data for %r: %s" +- % (filename, err)) ++ 
except IOError as err: ++ print(("Not printing coverage data for %r: %s" ++ % (filename, err)), file=sys.stderr) + return {} + code = compile(prog, filename, "exec") + strs = find_strings(filename) +@@ -486,7 +486,7 @@ class Trace: + sys.settrace(self.globaltrace) + threading.settrace(self.globaltrace) + try: +- exec cmd in dict, dict ++ exec(cmd, dict, dict) + finally: + if not self.donothing: + sys.settrace(None) +@@ -499,7 +499,7 @@ class Trace: + sys.settrace(self.globaltrace) + threading.settrace(self.globaltrace) + try: +- exec cmd in globals, locals ++ exec(cmd, globals, locals) + finally: + if not self.donothing: + sys.settrace(None) +@@ -598,8 +598,8 @@ class Trace: + ignore_it = self.ignore.names(filename, modulename) + if not ignore_it: + if self.trace: +- print (" --- modulename: %s, funcname: %s" +- % (modulename, code.co_name)) ++ print((" --- modulename: %s, funcname: %s" ++ % (modulename, code.co_name))) + return self.localtrace + else: + return None +@@ -613,8 +613,8 @@ class Trace: + self.counts[key] = self.counts.get(key, 0) + 1 + + bname = os.path.basename(filename) +- print "%s(%d): %s" % (bname, lineno, +- linecache.getline(filename, lineno)), ++ print("%s(%d): %s" % (bname, lineno, ++ linecache.getline(filename, lineno)), end=' ') + return self.localtrace + + def localtrace_trace(self, frame, why, arg): +@@ -624,8 +624,8 @@ class Trace: + lineno = frame.f_lineno + + bname = os.path.basename(filename) +- print "%s(%d): %s" % (bname, lineno, +- linecache.getline(filename, lineno)), ++ print("%s(%d): %s" % (bname, lineno, ++ linecache.getline(filename, lineno)), end=' ') + return self.localtrace + + def localtrace_count(self, frame, why, arg): +@@ -660,7 +660,7 @@ def main(argv=None): + "coverdir=", "listfuncs", + "trackcalls"]) + +- except getopt.error, msg: ++ except getopt.error as msg: + sys.stderr.write("%s: %s\n" % (sys.argv[0], msg)) + sys.stderr.write("Try `%s --help' for more information\n" + % sys.argv[0]) +@@ -780,7 +780,7 @@ def 
main(argv=None): + outfile=counts_file) + try: + t.run('execfile(%r)' % (progname,)) +- except IOError, err: ++ except IOError as err: + _err_exit("Cannot run file %r because: %s" % (sys.argv[0], err)) + except SystemExit: + pass +--- src/testoob/coverage.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/coverage.py +@@ -16,6 +16,7 @@ + "Code coverage module" + + import os, sys ++from functools import reduce + + def supported(): + "Is coverage supported?" +@@ -24,7 +25,7 @@ def supported(): + try: + import trace + except ImportError: +- from compatibility import trace ++ from .compatibility import trace + + try: + sum +@@ -41,7 +42,7 @@ except NameError: + from sets import Set as set + except ImportError: + # Python 2.2 compatibility +- from compatibility.sets import Set as set ++ from .compatibility.sets import Set as set + + def _find_executable_linenos(filename): + """ +@@ -50,9 +51,9 @@ def _find_executable_linenos(filename): + """ + try: + prog = open(filename, "rU").read() +- except IOError, err: +- print >> sys.stderr, ("Not printing coverage data for %r: %s" +- % (filename, err)) ++ except IOError as err: ++ print(("Not printing coverage data for %r: %s" ++ % (filename, err)), file=sys.stderr) + return {} + + # Adding trailing EOL if missing +@@ -80,7 +81,7 @@ class Coverage: + # lines - a set of number of executable lines in the file. + # covered - a set of numbers of executed lines in the file. + self.coverage = {} +- self.ignorepaths = map(os.path.abspath, ignorepaths) ++ self.ignorepaths = list(map(os.path.abspath, ignorepaths)) + self.modname = trace.modname + + def runfunc(self, func, *args, **kwargs): +@@ -108,7 +109,7 @@ class Coverage: + which holds the statistics for all the files together. 
+ """ + statistics = {} +- for filename, coverage in self.coverage.items(): ++ for filename, coverage in list(self.coverage.items()): + statistics[filename] = self._single_file_statistics(coverage) + return statistics + +@@ -129,7 +130,7 @@ class Coverage: + def _sum_coverage(self, callable): + "Helper method for _total_{lines,covered}" + return sum([callable(coverage) +- for coverage in self.coverage.values()]) ++ for coverage in list(self.coverage.values())]) + def total_lines(self): + return self._sum_coverage(lambda coverage: len(coverage["lines"])) + def total_lines_covered(self): +@@ -150,7 +151,7 @@ class Coverage: + if not (filename.endswith(".py") or filename.endswith(".pyc")): + return False + +- if not self.coverage.has_key(filename): ++ if filename not in self.coverage: + self.coverage[filename] = { + "lines": set(_find_executable_linenos(filename)), + "covered": set() +--- src/testoob/main.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/main.py +@@ -25,9 +25,9 @@ except NameError: + from sets import Set as set + except ImportError: + # Python 2.2 compatibility +- from compatibility.sets import Set as set ++ from .compatibility.sets import Set as set + +-import commandline ++from . 
import commandline + import testoob.reporting + + def _arg_parser(): +@@ -82,18 +82,18 @@ def _get_suites(suite, defaultTest, test_names, test_l + + try: + return test_loader.loadTestsFromNames(test_names, __main__) +- except AttributeError, e: ++ except AttributeError as e: + def testName(exception): + import re + mo = re.search("has no attribute '([^']+)'", str(e)) + assert mo is not None + return mo.group(1) + import sys +- print >>sys.stderr, "ERROR: Can't find test case '%s'" % testName(e) ++ print("ERROR: Can't find test case '%s'" % testName(e), file=sys.stderr) + sys.exit(1) + + def _dirname_from_func(func): +- return os.path.dirname(func.func_code.co_filename) ++ return os.path.dirname(func.__code__.co_filename) + + def _coverage_ignore_paths(): + # Ignore coverage from the 'testoob' library (where this file is), and +@@ -105,11 +105,11 @@ def _coverage_ignore_paths(): + python_dirname = _dirname_from_func(os.getenv) + return (testoob_dirname, python_dirname) + +-from commandline.parsing import ArgumentsError ++from .commandline.parsing import ArgumentsError + + def _main(suite, defaultTest, options, test_names, parser): + +- from commandline.parsing import require_posix, require_modules ++ from .commandline.parsing import require_posix, require_modules + + def conflicting_options(*option_names): + given_options = [ +@@ -146,10 +146,10 @@ def _main(suite, defaultTest, options, test_names, par + + def get_test_loader(): + if options.test_method_regex is not None: +- from test_loaders import RegexLoader ++ from .test_loaders import RegexLoader + return RegexLoader(options.test_method_regex) + if options.test_method_glob is not None: +- from test_loaders import GlobLoader ++ from .test_loaders import GlobLoader + return GlobLoader(options.test_method_glob) + return None # use the default + +@@ -157,8 +157,8 @@ def _main(suite, defaultTest, options, test_names, par + suite, defaultTest, test_names, test_loader=get_test_loader()) + + if options.coverage is not 
None: +- from running import fixture_decorators +- from coverage import Coverage ++ from .running import fixture_decorators ++ from .coverage import Coverage + cov = Coverage(_coverage_ignore_paths()) + kwargs["fixture_decorators"].append( + fixture_decorators.get_coverage_fixture(cov)) +@@ -166,17 +166,17 @@ def _main(suite, defaultTest, options, test_names, par + testoob.reporting.options.coverage = (options.coverage, cov) + + if options.capture is not None: +- from running import fixture_decorators ++ from .running import fixture_decorators + kwargs["fixture_decorators"].append( + fixture_decorators.get_capture_fixture()) + + if options.vassert: +- import asserter ++ from . import asserter + asserter.register_asserter() + + if options.timeout is not None: + require_posix("--timeout") +- from running import fixture_decorators ++ from .running import fixture_decorators + kwargs["fixture_decorators"].append( + fixture_decorators.get_alarmed_fixture(options.timeout)) + def alarm(sig, stack_frame): +@@ -185,10 +185,10 @@ def _main(suite, defaultTest, options, test_names, par + signal.signal(signal.SIGALRM, alarm) + + if options.timeout_with_threads is not None: +- import thread ++ import _thread + if not hasattr(thread, "interrupt_main"): + raise ArgumentsError("Older versions of Python don't support thread.interrupt_main") +- from running import fixture_decorators ++ from .running import fixture_decorators + kwargs["fixture_decorators"].append( + fixture_decorators.get_thread_timingout_fixture(options.timeout_with_threads)) + +@@ -204,15 +204,15 @@ def _main(suite, defaultTest, options, test_names, par + alarm(0) # Don't timeout on debug. 
+ assert flavour in ("error", "failure") + real_add(test, err_info) +- print "\nDebugging for %s in test: %s" % ( +- flavour, reporter.getDescription(test)) ++ print("\nDebugging for %s in test: %s" % ( ++ flavour, reporter.getDescription(test))) + if options.rerun_on_fail is not None: + #test.funcname will be our current test function + #use that to get the function object for our method + #and call it manually. WD-rpw 10-31-06 + methodName = test.funcname() + method = getattr( test.fixture, methodName) +- print "rerunning test for failed %s()" % (methodName) ++ print("rerunning test for failed %s()" % (methodName)) + try: + pdb.runcall( method ) + except: +@@ -223,7 +223,7 @@ def _main(suite, defaultTest, options, test_names, par + kwargs["runDebug"] = runDebug + + if options.threads is not None: +- from running import ThreadedRunner ++ from .running import ThreadedRunner + kwargs["runner"] = ThreadedRunner(num_threads = options.threads) + kwargs["threads"] = True + +@@ -234,7 +234,7 @@ def _main(suite, defaultTest, options, test_names, par + + def text_run_decorator(): + if options.profiler is not None: +- import profiling ++ from . import profiling + return profiling.profiling_decorator( + options.profiler, options.profdata) + +@@ -242,7 +242,7 @@ def _main(suite, defaultTest, options, test_names, par + return lambda x: x + + # apply the decorator to running.text_run +- import running ++ from . 
import running + return text_run_decorator()(running.text_run)(**kwargs) + + def kwarg_to_option(arg, value): +@@ -257,9 +257,9 @@ def _config_file_args(): + if not os.path.exists(filename): + return [] # No config file + +- import ConfigParser ++ import configparser + try: +- config = ConfigParser.ConfigParser() ++ config = configparser.ConfigParser() + config.read(filename) + + result = [] +@@ -272,7 +272,7 @@ def _config_file_args(): + result.append("--%s=%s" % (option, value)) + + return result +- except ConfigParser.Error, e: ++ except configparser.Error as e: + import warnings + warnings.warn("Error reading config file: %s" % e) + return [] +@@ -286,13 +286,13 @@ def _parse_args(): + + def main(suite=None, defaultTest=None, **kwargs): + import sys +- for arg, value in kwargs.items(): ++ for arg, value in list(kwargs.items()): + sys.argv.append(kwarg_to_option(arg, value)) + + parser, options, test_names = _parse_args() + + try: + sys.exit(not _main(suite, defaultTest, options, test_names, parser)) +- except ArgumentsError, e: ++ except ArgumentsError as e: + parser.error(str(e)) + +--- src/testoob/profiling.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/profiling.py +@@ -28,7 +28,7 @@ def profiling_decorator(profiler_name, filename): + def decorator(callable): + def wrapper(*args, **kwargs): + helper = _helper_class(profiler_name)(filename, callable, *args, **kwargs) +- print "Profiling information saved to file '%s'" % helper.filename ++ print("Profiling information saved to file '%s'" % helper.filename) + helper.run() + helper.print_stats(MAX_PROFILING_LINES_TO_PRINT) + +--- src/testoob/reporting/base.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/reporting/base.py +@@ -123,7 +123,7 @@ class BaseReporter(IReporter): + def startTest(self, test_info): + self.testsRun += 1 + self.asserts[test_info] = [] +- self.start_times[test_info] = _time.time() ++ self.start_times[test_info] = _time.time() + + def stopTest(self, test_info): + # TODO: In Python >= 2.3 
can use dict.pop +--- src/testoob/reporting/xslt.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/reporting/xslt.py +@@ -15,7 +15,7 @@ + + "Apply an XSL transformation to XMLReporter's xml output" + +-from xml import XMLReporter ++from .xml import XMLReporter + import time + class XSLTReporter(XMLReporter): + "This reporter uses an XSL transformation scheme to convert an XML output" +@@ -65,7 +65,7 @@ class XSLTReporter(XMLReporter): + def done(self): + XMLReporter.done(self) + xslt_applier = self._create_xslt_applier()(self.converter) +- result = xslt_applier.apply(self.get_xml(), params = {u'date': unicode(time.asctime())}) ++ result = xslt_applier.apply(self.get_xml(), params = {'date': str(time.asctime())}) + open(self.filename, "wt").write(result) + + def _create_xslt_applier(self): +@@ -80,5 +80,5 @@ class XSLTReporter(XMLReporter): + return XSLTReporter.WinCOMXSLTApplier + except: + pass +- raise Exception,"Unable to find supported XSLT library (4Suite, MSXML)" ++ raise Exception("Unable to find supported XSLT library (4Suite, MSXML)") + +--- src/testoob/run_cmd.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/run_cmd.py +@@ -20,7 +20,7 @@ class SubprocessCommandRunner(object): + from subprocess import Popen, PIPE + except ImportError: + # Python 2.2 and 2.3 compatibility +- from compatibility.subprocess import Popen, PIPE ++ from .compatibility.subprocess import Popen, PIPE + self._Popen = Popen + self._PIPE = PIPE + +@@ -49,19 +49,19 @@ class IronPythonCommandRunner(object): + p.StandardInput.Write(input) + p.WaitForExit() + stdout = p.StandardOutput.ReadToEnd() +- stderr = p.StandardError.ReadToEnd() ++ stderr = p.Exception.ReadToEnd() + return stdout, stderr, p.ExitCode + + def _choose_run_command(): + errors = [] + try: + return SubprocessCommandRunner().run +- except ImportError, e: ++ except ImportError as e: + errors.append(e) + + try: + return IronPythonCommandRunner().run +- except ImportError, e: ++ except ImportError as e: + errors.append(e) + + 
raise RuntimeError("couldn't find a working command runner", errors) +--- src/testoob/running/convenience.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/running/convenience.py +@@ -15,8 +15,8 @@ + + "convenience functions for running tests" + +-from __future__ import generators + ++ + import time + + ############################################################################### +@@ -38,7 +38,7 @@ class TestLoop(object): + suites, runner, interval=None, stop_on_fail=False, + extraction_decorators=None, fixture_decorators=None): + +- from fixture_decorators import BaseFixture ++ from .fixture_decorators import BaseFixture + self.suites = suites + self.runner = runner + self.interval = interval +@@ -73,7 +73,7 @@ class TestLoop(object): + self.last_result = self.runner.run(decorated_fixture) + + def _handle_interrupt(self, fixture): +- from fixture_decorators import get_interrupterd_fixture ++ from .fixture_decorators import get_interrupterd_fixture + if hasattr(self, "last_interrupt") and (time.time() - self.last_interrupt < 1): + # Two interrupts in less than a second, cause all + # future tests to skip +@@ -91,7 +91,7 @@ class TestLoop(object): + self._run_fixture(fixture) + if self.stop_on_fail and not self.last_result: + return +- except KeyboardInterrupt, e: ++ except KeyboardInterrupt as e: + self._handle_interrupt(fixture) + + def run(self): +@@ -140,7 +140,7 @@ def _create_reporter_proxy(reporters, runDebug, thread + def run_suites(suites, reporters, runner=None, runDebug=None, threads=None, **kwargs): + "Run the test suites" + if runner is None: +- from simplerunner import SimpleRunner ++ from .simplerunner import SimpleRunner + runner = SimpleRunner() + runner.reporter = _create_reporter_proxy(reporters, runDebug, threads=threads) + +--- src/testoob/running/listingrunner.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/running/listingrunner.py +@@ -15,7 +15,7 @@ + + "Runner that lists tests that would be run" + +-from baserunner import BaseRunner ++from 
.baserunner import BaseRunner + + class ListingRunner(BaseRunner): + """Just list the test names, don't run them. +@@ -30,9 +30,9 @@ class ListingRunner(BaseRunner): + + def done(self): + if self.output_format == None: +- print self.history.get_string() ++ print(self.history.get_string()) + elif self.output_format.lower() == "csv": +- print self.history.get_csv() ++ print(self.history.get_csv()) + + class _TestHistory: + def __init__(self): +@@ -49,10 +49,10 @@ class _TestHistory: + """Show all test methods. + """ + result = [] +- for (module_name, module_info) in self.modules.items(): ++ for (module_name, module_info) in list(self.modules.items()): + result.append("Module: %s (%s)" % \ + (module_name, module_info["filename"])) +- for (class_name, functions) in module_info["classes"].items(): ++ for (class_name, functions) in list(module_info["classes"].items()): + result.append("\tClass: %s (%d test functions)" %\ + (class_name, len(functions))) + for func in functions: +@@ -66,8 +66,8 @@ class _TestHistory: + #FIXXXME may be i should add the path that needs to be in sys.path + # in order to import the module.... 
+ result = ["file,module,class,method,docstring"] +- for (module_name, module_info) in self.modules.items(): +- for (class_name, functions) in module_info["classes"].items(): ++ for (module_name, module_info) in list(self.modules.items()): ++ for (class_name, functions) in list(module_info["classes"].items()): + for func in functions: + data = [module_info["filename"], + module_name, +@@ -93,7 +93,7 @@ class _TestHistory: + + def _num_functions(self): + result = 0 +- for mod_info in self.modules.values(): +- for functions in mod_info["classes"].values(): ++ for mod_info in list(self.modules.values()): ++ for functions in list(mod_info["classes"].values()): + result += len(functions) + return result +--- src/testoob/testing.py.orig 2022-03-18 18:45:28 UTC ++++ src/testoob/testing.py +@@ -47,7 +47,7 @@ def assert_true(condition, msg=None): + def assert_equals(expected, actual, msg=None, filter=None): + "works like unittest.TestCase.assertEquals" + if filter is not None: +- actual = filter(actual) ++ actual = list(filter(actual)) + + if expected == actual: return + if msg is None: +@@ -58,7 +58,7 @@ def assert_matches(regex, actual, msg=None, filter=Non + "fail unless regex matches actual (using re.search)" + import re + if filter is not None: +- actual = filter(actual) ++ actual = list(filter(actual)) + + if re.search(regex, actual, re.DOTALL) is not None: return + +@@ -72,7 +72,7 @@ def _call_signature(callable, *args, **kwargs): + + From recipe http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/307970 + """ +- argv = [repr(arg) for arg in args] + ["%s=%r" % x for x in kwargs.items()] ++ argv = [repr(arg) for arg in args] + ["%s=%r" % x for x in list(kwargs.items())] + return "%s(%s)" % (callable.__name__, ", ".join(argv)) + + def assert_raises(exception_class, callable, *args, **kwargs): +@@ -94,7 +94,7 @@ def assert_raises(exception_class, callable, *args, ** + + try: + callable(*args, **kwargs) +- except exception_class, e: ++ except exception_class as e: + 
if expected_args is not None: + assert_equals( + expected_args, e.args, +@@ -131,7 +131,7 @@ def command_line( + used as the skip reason. + """ + +- from run_cmd import run_command ++ from .run_cmd import run_command + # run command + output, error, rc = run_command(args, input) + +@@ -154,7 +154,7 @@ def command_line( + assert_equals(expected_rc, rc) + if rc_predicate is not None: + assert_true(rc_predicate(rc)) +- except TestoobAssertionError, e: ++ except TestoobAssertionError as e: + assert e.long_message is None + def annotated_err_string(name, value): + if not value: return "== %s: NONE" % name diff --git a/devel/py-types-docutils/Makefile b/devel/py-types-docutils/Makefile index 98f52de9540..4a29f165b09 100644 --- a/devel/py-types-docutils/Makefile +++ b/devel/py-types-docutils/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= types-docutils -PORTVERSION= 0.17.7 +PORTVERSION= 0.18.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-types-docutils/distinfo b/devel/py-types-docutils/distinfo index afa99505527..ecdb3b8e856 100644 --- a/devel/py-types-docutils/distinfo +++ b/devel/py-types-docutils/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058048 -SHA256 (types-docutils-0.17.7.tar.gz) = 3d856ea26551a998c8e2c99a0bafe5e4d391811955f17dab6c9be73b0fc67b66 -SIZE (types-docutils-0.17.7.tar.gz) = 7565 +TIMESTAMP = 1647264578 +SHA256 (types-docutils-0.18.0.tar.gz) = 14f781eb28d89a1cd61f1c41bd0776ad3bb4e2333d317c37d2c67f2eaf5891fe +SIZE (types-docutils-0.18.0.tar.gz) = 8351 diff --git a/devel/py-urlimport/files/patch-2to3 b/devel/py-urlimport/files/patch-2to3 new file mode 100644 index 00000000000..4397764790a --- /dev/null +++ b/devel/py-urlimport/files/patch-2to3 @@ -0,0 +1,68 @@ +--- urlimport.py.orig 2022-03-18 17:06:46 UTC ++++ urlimport.py +@@ -34,7 +34,7 @@ settings = sys.__dict__.setdefault( + + def debug(s, pf='| |', lvl=1): + if lvl <= settings.get('debug'): +- print "%s %s" % (pf, s) ++ 
print("%s %s" % (pf, s)) + + class UrlFinder: + def __init__(self, path): +@@ -60,7 +60,7 @@ class UrlFinder: + (self.path + fullname + '/__init__.py', self.path + fullname + '/')]: + try: + source = self.get_source(url) +- except Exception, e: ++ except Exception as e: + debug("find_module: failed to get '%s'. (%s)" % (url, e), lvl=3) + else: + debug("find_module: got '%s'." % url, lvl=1) +@@ -71,7 +71,7 @@ class UrlFinder: + def get_source(self, url): + """Download the source from given url. + """ +- from urllib2 import urlopen ++ from urllib.request import urlopen + + src = '' + +@@ -85,9 +85,9 @@ class UrlFinder: + + if proto == 'https' and cert: + # handle http over ssl with client certificate +- import httplib ++ import http.client + +- conn = httplib.HTTPSConnection( ++ conn = http.client.HTTPSConnection( + host=host, + port=port, + key_file=key, +@@ -98,7 +98,7 @@ class UrlFinder: + conn.endheaders() + response = conn.getresponse() + if response.status != 200: +- raise StandardError, "HTTPS Error: %d"%response.status ++ raise Exception("HTTPS Error: %d"%response.status) + src = response.read() + else: + # handle everything else +@@ -131,7 +131,7 @@ class UrlLoader: + + debug("load_module: executing %s's source..." % fullname, lvl=2) + +- exec self.source in mod.__dict__ ++ exec(self.source, mod.__dict__) + + mod = sys.modules[fullname] + return mod +@@ -142,7 +142,7 @@ def config(**kwargs): + config() - Display settings. 
+ """ + settings.update(kwargs) +- for k,v in (kwargs or settings).iteritems(): ++ for k,v in (kwargs or settings).items(): + debug(" "+str(k)+"="+repr(v), lvl=0 ) + + # register The Hook diff --git a/devel/py-userpath/Makefile b/devel/py-userpath/Makefile index b97839bac38..a1fae0a48ed 100644 --- a/devel/py-userpath/Makefile +++ b/devel/py-userpath/Makefile @@ -2,6 +2,7 @@ PORTNAME= userpath PORTVERSION= 1.8.0 +PORTREVISION= 1 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-userpath/files/setup.py b/devel/py-userpath/files/setup.py index d0513408f9b..0b964e937fb 100644 --- a/devel/py-userpath/files/setup.py +++ b/devel/py-userpath/files/setup.py @@ -28,7 +28,6 @@ setup( ], }, packages=[ - 'tests', 'userpath', ], ) diff --git a/devel/py-wrapt/Makefile b/devel/py-wrapt/Makefile index ce22fb84201..fbed0a206ba 100644 --- a/devel/py-wrapt/Makefile +++ b/devel/py-wrapt/Makefile @@ -1,5 +1,5 @@ PORTNAME= wrapt -PORTVERSION= 1.13.3 +PORTVERSION= 1.14.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-wrapt/distinfo b/devel/py-wrapt/distinfo index 069d3c94c4e..e8ac132dd43 100644 --- a/devel/py-wrapt/distinfo +++ b/devel/py-wrapt/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1635730985 -SHA256 (wrapt-1.13.3.tar.gz) = 1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185 -SIZE (wrapt-1.13.3.tar.gz) = 48871 +TIMESTAMP = 1647264580 +SHA256 (wrapt-1.14.0.tar.gz) = 8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311 +SIZE (wrapt-1.14.0.tar.gz) = 50796 diff --git a/devel/py-xarray/Makefile b/devel/py-xarray/Makefile index c8c912e0d85..3956a023b73 100644 --- a/devel/py-xarray/Makefile +++ b/devel/py-xarray/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= xarray -PORTVERSION= 0.21.1 +PORTVERSION= 2022.3.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git 
a/devel/py-xarray/distinfo b/devel/py-xarray/distinfo index ec6e950c4fb..77a2a7ffa17 100644 --- a/devel/py-xarray/distinfo +++ b/devel/py-xarray/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058054 -SHA256 (xarray-0.21.1.tar.gz) = 0cd5a17c1271d6b468fb3872bd2ca196351cd522719275c436e45cac1d1ffc8b -SIZE (xarray-0.21.1.tar.gz) = 2936861 +TIMESTAMP = 1647264582 +SHA256 (xarray-2022.3.0.tar.gz) = 398344bf7d170477aaceff70210e11ebd69af6b156fe13978054d25c48729440 +SIZE (xarray-2022.3.0.tar.gz) = 2943007 diff --git a/devel/py-yapps2/files/patch-2to3 b/devel/py-yapps2/files/patch-2to3 new file mode 100644 index 00000000000..bf8d6a1f79f --- /dev/null +++ b/devel/py-yapps2/files/patch-2to3 @@ -0,0 +1,162 @@ +--- lib/yapps/grammar.py.orig 2005-09-15 07:55:16 UTC ++++ lib/yapps/grammar.py +@@ -35,7 +35,7 @@ def resolve_name(rule, tokens, id, args): + if id in [x[0] for x in tokens]: + # It's a token + if args: +- print 'Warning: ignoring parameters on TOKEN %s<<%s>>' % (id, args) ++ print('Warning: ignoring parameters on TOKEN %s<<%s>>' % (id, args)) + return parsetree.Terminal(rule, id) + else: + # It's a name, so assume it's a nonterminal +--- lib/yapps/parsetree.py.orig 2005-09-15 07:55:40 UTC ++++ lib/yapps/parsetree.py +@@ -41,8 +41,8 @@ class Generator: + if n == '#ignore': + n = t + self.ignore.append(n) +- if n in self.tokens.keys() and self.tokens[n] != t: +- print >>sys.stderr, 'Warning: token %s defined more than once.' % n ++ if n in list(self.tokens.keys()) and self.tokens[n] != t: ++ print('Warning: token %s defined more than once.' 
% n, file=sys.stderr) + self.tokens[n] = t + self.terminals.append(n) + +@@ -236,20 +236,20 @@ class Generator: + """Display the grammar in somewhat human-readable form.""" + self.calculate() + for r in self.goals: +- print ' _____' + '_'*len(r) +- print ('___/Rule '+r+'\\' + '_'*80)[:79] ++ print(' _____' + '_'*len(r)) ++ print(('___/Rule '+r+'\\' + '_'*80)[:79]) + queue = [self.rules[r]] + while queue: + top = queue[0] + del queue[0] + +- print 'Rule', repr(top), 'of class', top.__class__.__name__ ++ print('Rule', repr(top), 'of class', top.__class__.__name__) + top.first.sort() + top.follow.sort() + eps = [] + if top.accepts_epsilon: eps = ['(null)'] +- print ' FIRST:', ', '.join(top.first+eps) +- print ' FOLLOW:', ', '.join(top.follow) ++ print(' FIRST:', ', '.join(top.first+eps)) ++ print(' FOLLOW:', ', '.join(top.follow)) + for x in top.get_children(): queue.append(x) + + def generate_output(self): +@@ -390,7 +390,7 @@ class NonTerminal(Node): + self.accepts_epsilon = self.target.accepts_epsilon + gen.changed() + except KeyError: # Oops, it's nonexistent +- print >>sys.stderr, 'Error: no rule <%s>' % self.name ++ print('Error: no rule <%s>' % self.name, file=sys.stderr) + self.target = self + + def __str__(self): +@@ -518,12 +518,12 @@ class Choice(Node): + tokens_seen = tokens_seen + testset + if removed: + if not testset: +- print >>sys.stderr, 'Error in rule', self.rule+':' ++ print('Error in rule', self.rule+':', file=sys.stderr) + else: +- print >>sys.stderr, 'Warning in rule', self.rule+':' +- print >>sys.stderr, ' *', self +- print >>sys.stderr, ' * These tokens could be matched by more than one clause:' +- print >>sys.stderr, ' *', ' '.join(removed) ++ print('Warning in rule', self.rule+':', file=sys.stderr) ++ print(' *', self, file=sys.stderr) ++ print(' * These tokens could be matched by more than one clause:', file=sys.stderr) ++ print(' *', ' '.join(removed), file=sys.stderr) + + if testset: + if not tokens_unseen: # context sensitive scanners 
only! +@@ -582,7 +582,7 @@ class Option(Wrapper): + + def output(self, gen, indent): + if self.child.accepts_epsilon: +- print >>sys.stderr, 'Warning in rule', self.rule+': contents may be empty.' ++ print('Warning in rule', self.rule+': contents may be empty.', file=sys.stderr) + gen.write(indent, "if %s:\n" % + gen.peek_test(self.first, self.child.first)) + self.child.output(gen, indent+INDENT) +@@ -604,8 +604,8 @@ class Plus(Wrapper): + + def output(self, gen, indent): + if self.child.accepts_epsilon: +- print >>sys.stderr, 'Warning in rule', self.rule+':' +- print >>sys.stderr, ' * The repeated pattern could be empty. The resulting parser may not work properly.' ++ print('Warning in rule', self.rule+':', file=sys.stderr) ++ print(' * The repeated pattern could be empty. The resulting parser may not work properly.', file=sys.stderr) + gen.write(indent, "while 1:\n") + self.child.output(gen, indent+INDENT) + union = self.first[:] +@@ -630,8 +630,8 @@ class Star(Wrapper): + + def output(self, gen, indent): + if self.child.accepts_epsilon: +- print >>sys.stderr, 'Warning in rule', self.rule+':' +- print >>sys.stderr, ' * The repeated pattern could be empty. The resulting parser probably will not work properly.' ++ print('Warning in rule', self.rule+':', file=sys.stderr) ++ print(' * The repeated pattern could be empty. 
The resulting parser probably will not work properly.', file=sys.stderr) + gen.write(indent, "while %s:\n" % + gen.peek_test(self.follow, self.child.first)) + self.child.output(gen, indent+INDENT) +--- lib/yapps/yapps_grammar.py.orig 2005-09-15 07:56:02 UTC ++++ lib/yapps/yapps_grammar.py +@@ -35,7 +35,7 @@ def resolve_name(rule, tokens, id, args): + if id in [x[0] for x in tokens]: + # It's a token + if args: +- print 'Warning: ignoring parameters on TOKEN %s<<%s>>' % (id, args) ++ print('Warning: ignoring parameters on TOKEN %s<<%s>>' % (id, args)) + return parsetree.Terminal(rule, id) + else: + # It's a name, so assume it's a nonterminal +--- lib/yapps/yappsrt.py.orig 2005-09-15 07:56:12 UTC ++++ lib/yapps/yappsrt.py +@@ -272,8 +272,8 @@ def print_line_with_pointer(text, p): + p = p - 7 + + # Now print the string, along with an indicator +- print >>sys.stderr, '> ',text +- print >>sys.stderr, '> ',' '*p + '^' ++ print('> ',text, file=sys.stderr) ++ print('> ',' '*p + '^', file=sys.stderr) + + def print_error(input, err, scanner): + """Print error messages, the parser stack, and the input text -- for human-readable error messages.""" +@@ -281,7 +281,7 @@ def print_error(input, err, scanner): + # Figure out the line number + line_number = scanner.get_line_number() + column_number = scanner.get_column_number() +- print >>sys.stderr, '%d:%d: %s' % (line_number, column_number, err.msg) ++ print('%d:%d: %s' % (line_number, column_number, err.msg), file=sys.stderr) + + context = err.context + if not context: +@@ -289,16 +289,16 @@ def print_error(input, err, scanner): + + while context: + # TODO: add line number +- print >>sys.stderr, 'while parsing %s%s:' % (context.rule, tuple(context.args)) ++ print('while parsing %s%s:' % (context.rule, tuple(context.args)), file=sys.stderr) + print_line_with_pointer(input, context.scanner.get_prev_char_pos(context.tokenpos)) + context = context.parent + + def wrap_error_reporter(parser, rule): + try: + return getattr(parser, 
rule)() +- except SyntaxError, e: ++ except SyntaxError as e: + input = parser._scanner.input + print_error(input, e, parser._scanner) + except NoMoreTokens: +- print >>sys.stderr, 'Could not complete parsing; stopped around here:' +- print >>sys.stderr, parser._scanner ++ print('Could not complete parsing; stopped around here:', file=sys.stderr) ++ print(parser._scanner, file=sys.stderr) diff --git a/devel/py-z3c.autoinclude/files/patch-2to3 b/devel/py-z3c.autoinclude/files/patch-2to3 new file mode 100644 index 00000000000..6f00467ae26 --- /dev/null +++ b/devel/py-z3c.autoinclude/files/patch-2to3 @@ -0,0 +1,11 @@ +--- src/z3c/autoinclude/dependency.py.orig 2016-01-29 12:30:33 UTC ++++ src/z3c/autoinclude/dependency.py +@@ -24,7 +24,7 @@ class DependencyFinder(DistributionManager): + for dotted_name in dist_manager.dottedNames(): + try: + module = resolve(dotted_name) +- except ImportError, exc: ++ except ImportError as exc: + logging.getLogger("z3c.autoinclude").warn( + "resolve(%r) raised import error: %s" % (dotted_name, exc)) + continue diff --git a/devel/py-zict/Makefile b/devel/py-zict/Makefile index 3046f5039a9..01e3af08cd1 100644 --- a/devel/py-zict/Makefile +++ b/devel/py-zict/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= zict -PORTVERSION= 2.0.0 +PORTVERSION= 2.1.0 CATEGORIES= devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/py-zict/distinfo b/devel/py-zict/distinfo index ed47acaa0b9..f4a38283a7a 100644 --- a/devel/py-zict/distinfo +++ b/devel/py-zict/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1582975677 -SHA256 (zict-2.0.0.tar.gz) = 8e2969797627c8a663575c2fc6fcb53a05e37cdb83ee65f341fc6e0c3d0ced16 -SIZE (zict-2.0.0.tar.gz) = 11571 +TIMESTAMP = 1647264584 +SHA256 (zict-2.1.0.tar.gz) = 15b2cc15f95a476fbe0623fd8f771e1e771310bf7a01f95412a0b605b6e47510 +SIZE (zict-2.1.0.tar.gz) = 13639 diff --git a/devel/py-zope.cachedescriptors/files/patch-2to3 b/devel/py-zope.cachedescriptors/files/patch-2to3 new 
file mode 100644 index 00000000000..0cc55119202 --- /dev/null +++ b/devel/py-zope.cachedescriptors/files/patch-2to3 @@ -0,0 +1,11 @@ +--- src/zope/cachedescriptors/property.py.orig 2007-05-09 07:11:30 UTC ++++ src/zope/cachedescriptors/property.py +@@ -16,7 +16,7 @@ See the CachedProperty class. + $Id: property.py 75652 2007-05-09 13:11:30Z zagy $ + """ + +-ncaches = 0l ++ncaches = 0 + + + class CachedProperty(object): diff --git a/devel/py-zope.contenttype/files/patch-2to3 b/devel/py-zope.contenttype/files/patch-2to3 new file mode 100644 index 00000000000..490d88cb1cf --- /dev/null +++ b/devel/py-zope.contenttype/files/patch-2to3 @@ -0,0 +1,31 @@ +--- src/zope/contenttype/__init__.py.orig 2011-07-27 03:41:00 UTC ++++ src/zope/contenttype/__init__.py +@@ -106,6 +106,6 @@ here = os.path.dirname(os.path.abspath(__file__)) + add_files([os.path.join(here, "mime.types")]) + + if __name__ == '__main__': +- items = mimetypes.types_map.items() ++ items = list(mimetypes.types_map.items()) + items.sort() +- for item in items: print "%s:\t%s" % item ++ for item in items: print("%s:\t%s" % item) +--- src/zope/contenttype/parse.py.orig 2010-04-05 21:04:38 UTC ++++ src/zope/contenttype/parse.py +@@ -112,14 +112,15 @@ def _unescape(string): + return string + + +-def join((major, minor, params)): ++def join(xxx_todo_changeme): ++ (major, minor, params) = xxx_todo_changeme + pstr = "" + try: + params.items + except AttributeError: + pass + else: +- params = params.items() ++ params = list(params.items()) + # ensure a predictable order: + params.sort() + for name, value in params: diff --git a/devel/py-zope.datetime/files/patch-2to3 b/devel/py-zope.datetime/files/patch-2to3 new file mode 100644 index 00000000000..085d5e1234e --- /dev/null +++ b/devel/py-zope.datetime/files/patch-2to3 @@ -0,0 +1,167 @@ +--- src/zope/datetime/__init__.py.orig 2011-11-29 16:29:14 UTC ++++ src/zope/datetime/__init__.py +@@ -87,7 +87,7 @@ i=int(yr-1) + to_year =int(i*365+i/4-i/100+i/400-693960.0) + 
to_month=tm[yr%4==0 and (yr%100!=0 or yr%400==0)][mo] + EPOCH =(to_year+to_month+dy+(hr/24.0+mn/1440.0+sc/86400.0))*86400 +-jd1901 =2415385L ++jd1901 =2415385 + + + numericTimeZoneMatch=re.compile(r'[+-][0-9][0-9][0-9][0-9]').match #TS +@@ -282,7 +282,7 @@ class _cache: + + def __init__(self): + self._db = _data +- self._d, self._zidx= {}, self._zmap.keys() ++ self._d, self._zidx= {}, list(self._zmap.keys()) + + def __getitem__(self,k): + try: n=self._zmap[k.lower()] +@@ -337,28 +337,28 @@ def _calcDependentSecond(tz, t): + # Calculates the timezone-dependent second (integer part only) + # from the timezone-independent second. + fset = _tzoffset(tz, t) +- return fset + long(math.floor(t)) + long(EPOCH) - 86400L ++ return fset + int(math.floor(t)) + int(EPOCH) - 86400 + + def _calcDependentSecond2(yr,mo,dy,hr,mn,sc): + # Calculates the timezone-dependent second (integer part only) + # from the date given. + ss = int(hr) * 3600 + int(mn) * 60 + int(sc) +- x = long(_julianday(yr,mo,dy)-jd1901) * 86400 + ss ++ x = int(_julianday(yr,mo,dy)-jd1901) * 86400 + ss + return x + + def _calcIndependentSecondEtc(tz, x, ms): + # Derive the timezone-independent second from the timezone + # dependent second. + fsetAtEpoch = _tzoffset(tz, 0.0) +- nearTime = x - fsetAtEpoch - long(EPOCH) + 86400L + ms ++ nearTime = x - fsetAtEpoch - int(EPOCH) + 86400 + ms + # nearTime is now within an hour of being correct. + # Recalculate t according to DST. 
+- fset = long(_tzoffset(tz, nearTime)) ++ fset = int(_tzoffset(tz, nearTime)) + x_adjusted = x - fset + ms + d = x_adjusted / 86400.0 +- t = x_adjusted - long(EPOCH) + 86400L ++ t = x_adjusted - int(EPOCH) + 86400 + millis = (x + 86400 - fset) * 1000 + \ +- long(ms * 1000.0) - long(EPOCH * 1000.0) ++ int(ms * 1000.0) - int(EPOCH * 1000.0) + s = d - math.floor(d) + return s,d,t,millis + +@@ -382,34 +382,34 @@ def _calcYMDHMS(x, ms): + return yr,mo,dy,hr,mn,sc + + def _julianday(yr,mo,dy): +- y,m,d=long(yr),long(mo),long(dy) +- if m > 12L: +- y=y+m/12L +- m=m%12L +- elif m < 1L: ++ y,m,d=int(yr),int(mo),int(dy) ++ if m > 12: ++ y=y+m/12 ++ m=m%12 ++ elif m < 1: + m=-m +- y=y-m/12L-1L +- m=12L-m%12L +- if y > 0L: yr_correct=0L +- else: yr_correct=3L +- if m < 3L: y, m=y-1L,m+12L +- if y*10000L+m*100L+d > 15821014L: b=2L-y/100L+y/400L +- else: b=0L +- return (1461L*y-yr_correct)/4L+306001L*(m+1L)/10000L+d+1720994L+b ++ y=y-m/12-1 ++ m=12-m%12 ++ if y > 0: yr_correct=0 ++ else: yr_correct=3 ++ if m < 3: y, m=y-1,m+12 ++ if y*10000+m*100+d > 15821014: b=2-y/100+y/400 ++ else: b=0 ++ return (1461*y-yr_correct)/4+306001*(m+1)/10000+d+1720994+b + + def _calendarday(j): +- j=long(j) +- if(j < 2299160L): +- b=j+1525L ++ j=int(j) ++ if(j < 2299160): ++ b=j+1525 + else: +- a=(4L*j-7468861L)/146097L +- b=j+1526L+a-a/4L +- c=(20L*b-2442L)/7305L +- d=1461L*c/4L +- e=10000L*(b-d)/306001L +- dy=int(b-d-306001L*e/10000L) +- mo=(e < 14L) and int(e-1L) or int(e-13L) +- yr=(mo > 2) and (c-4716L) or (c-4715L) ++ a=(4*j-7468861)/146097 ++ b=j+1526+a-a/4 ++ c=(20*b-2442)/7305 ++ d=1461*c/4 ++ e=10000*(b-d)/306001 ++ dy=int(b-d-306001*e/10000) ++ mo=(e < 14) and int(e-1) or int(e-13) ++ yr=(mo > 2) and (c-4716) or (c-4715) + return int(yr),int(mo),int(dy) + + def _tzoffset(tz, t): +@@ -619,7 +619,7 @@ class DateTimeParser: + if not self._multipleZones: + return self._localzone0 + fsetAtEpoch = _tzoffset(self._localzone0, 0.0) +- nearTime = x - fsetAtEpoch - long(EPOCH) + 86400L + ms ++ 
nearTime = x - fsetAtEpoch - int(EPOCH) + 86400 + ms + # nearTime is within an hour of being correct. + try: + ltm = safelocaltime(nearTime) +@@ -631,7 +631,7 @@ class DateTimeParser: + yr,mo,dy,hr,mn,sc = _calcYMDHMS(x, 0) + yr = ((yr - 1970) % 28) + 1970 + x = _calcDependentSecond2(yr,mo,dy,hr,mn,sc) +- nearTime = x - fsetAtEpoch - long(EPOCH) + 86400L + ms ++ nearTime = x - fsetAtEpoch - int(EPOCH) + 86400 + ms + ltm = safelocaltime(nearTime) + tz = self.localZone(ltm) + return tz +--- src/zope/datetime/timezones.py.orig 2011-11-29 16:29:14 UTC ++++ src/zope/datetime/timezones.py +@@ -1178,23 +1178,23 @@ historical_zone_info = { + + def dumpTimezoneInfo(_data): + +- print "historical_zone_info = {" ++ print("historical_zone_info = {") + +- items = _data.items() ++ items = list(_data.items()) + items.sort() + for key, value in items: + v1, v2, v3, ilist, bitmap, two_by_three, two_nullterm = value +- print "'%s': ('%s', %s, %s," % (key, v1, v2, v3) +- print "[", ++ print("'%s': ('%s', %s, %s," % (key, v1, v2, v3)) ++ print("[", end=' ') + while ilist: + next_5, ilist = ilist[:5], ilist[5:] + line = ", ".join(["'%s'" % x for x in next_5]) +- print "%s," % line +- print "], " +- print "%s," % repr(bitmap) +- print "%s, %s)," % (repr(two_by_three), repr(two_nullterm)) ++ print("%s," % line) ++ print("], ") ++ print("%s," % repr(bitmap)) ++ print("%s, %s)," % (repr(two_by_three), repr(two_nullterm))) + +- print "}" ++ print("}") + + if __name__ == '__main__': + dumpTimezoneInfo(historical_zone_info) diff --git a/devel/py-zope.deferredimport/files/patch-2to3 b/devel/py-zope.deferredimport/files/patch-2to3 new file mode 100644 index 00000000000..fa1466d0c94 --- /dev/null +++ b/devel/py-zope.deferredimport/files/patch-2to3 @@ -0,0 +1,55 @@ +--- src/zope/deferredimport/deferredmodule.py.orig 2010-09-25 12:48:49 UTC ++++ src/zope/deferredimport/deferredmodule.py +@@ -65,7 +65,7 @@ class ModuleProxy(zope.proxy.ProxyBase): + try: + get = 
self.__deferred_definitions__.pop(name) + except KeyError: +- raise AttributeError, name ++ raise AttributeError(name) + v = get.get() + setattr(self, name, v) + return v +@@ -108,7 +108,7 @@ def define(**names): + """ + module = initialize(2) + __deferred_definitions__ = module.__deferred_definitions__ +- for name, specifier in names.iteritems(): ++ for name, specifier in names.items(): + __deferred_definitions__[name] = Deferred(name, specifier) + + def defineFrom(from_name, *names): +@@ -135,7 +135,7 @@ def deprecated(message, **names): + """ + module = initialize(2) + __deferred_definitions__ = module.__deferred_definitions__ +- for name, specifier in names.iteritems(): ++ for name, specifier in names.items(): + __deferred_definitions__[name] = DeferredAndDeprecated( + name, specifier, message) + +--- src/zope/deferredimport/tests.py.orig 2010-09-25 12:48:49 UTC ++++ src/zope/deferredimport/tests.py +@@ -39,20 +39,20 @@ def warn(message, type_, stacklevel): + for i in range(lineno): + line = file.readline() + +- print "%s:%s: %s: %s\n %s" % ( ++ print("%s:%s: %s: %s\n %s" % ( + path, + frame.f_lineno, + type_.__name__, + message, + line.strip(), +- ) ++ )) + + + def setUp(test): + d = test.globs['tmp_d'] = tempfile.mkdtemp('deferredimport') + + def create_module(**modules): +- for name, src in modules.iteritems(): ++ for name, src in modules.items(): + f = open(os.path.join(d, name+'.py'), 'w') + f.write(src) + f.close() diff --git a/devel/py-zope.dottedname/Makefile b/devel/py-zope.dottedname/Makefile index 694a60d7027..15dbfad3e7e 100644 --- a/devel/py-zope.dottedname/Makefile +++ b/devel/py-zope.dottedname/Makefile @@ -14,4 +14,6 @@ LICENSE= ZPL21 USES= python:3.6+ USE_PYTHON= distutils autoplist +NO_ARCH= yes + .include diff --git a/devel/py-zope.generations/files/patch-2to3 b/devel/py-zope.generations/files/patch-2to3 new file mode 100644 index 00000000000..4c13011a0d1 --- /dev/null +++ b/devel/py-zope.generations/files/patch-2to3 @@ -0,0 +1,22 @@ +--- 
src/zope/generations/generations.py.orig 2011-12-22 19:48:30 UTC ++++ src/zope/generations/generations.py +@@ -19,8 +19,8 @@ import transaction + import zope.component + import zope.interface + +-from interfaces import GenerationTooHigh, GenerationTooLow, UnableToEvolve +-from interfaces import ISchemaManager, IInstallableSchemaManager ++from .interfaces import GenerationTooHigh, GenerationTooLow, UnableToEvolve ++from .interfaces import ISchemaManager, IInstallableSchemaManager + + + logger = logging.getLogger('zope.generations') +@@ -147,7 +147,7 @@ class SchemaManager(object): + + try: + evolver = __import__(name, {}, {}, ['*']) +- except ImportError, m: ++ except ImportError as m: + if str(m) not in ('No module named %s' % name, + 'No module named install'): + # This was an import error *within* the module, so we re-raise. diff --git a/devel/py-zope.i18n/files/patch-2to3 b/devel/py-zope.i18n/files/patch-2to3 new file mode 100644 index 00000000000..7764ca1fd65 --- /dev/null +++ b/devel/py-zope.i18n/files/patch-2to3 @@ -0,0 +1,529 @@ +--- src/zope/i18n/format.py.orig 2012-03-15 13:58:09 UTC ++++ src/zope/i18n/format.py +@@ -86,7 +86,7 @@ class DateTimeFormat(object): + + # Map the parsing results to a datetime object + ordered = [None, None, None, None, None, None, None] +- bin_pattern = filter(lambda x: isinstance(x, tuple), bin_pattern) ++ bin_pattern = [x for x in bin_pattern if isinstance(x, tuple)] + + # Handle years; note that only 'yy' and 'yyyy' are allowed + if ('y', 2) in bin_pattern: +@@ -124,7 +124,7 @@ class DateTimeFormat(object): + + # Shortcut for the simple int functions + dt_fields_map = {'d': 2, 'H': 3, 'm': 4, 's': 5, 'S': 6} +- for field in dt_fields_map.keys(): ++ for field in list(dt_fields_map.keys()): + entry = _findFormattingCharacterInPattern(field, bin_pattern) + if not entry: continue + pos = dt_fields_map[field] +@@ -186,7 +186,7 @@ class DateTimeFormat(object): + else: + bin_pattern = self._bin_pattern + +- text = u'' ++ text = '' + 
info = buildDateTimeInfo(obj, self.calendar, bin_pattern) + for elem in bin_pattern: + text += info.get(elem, elem) +@@ -209,18 +209,18 @@ class NumberFormat(object): + def __init__(self, pattern=None, symbols={}): + # setup default symbols + self.symbols = { +- u'decimal': u'.', +- u'group': u',', +- u'list': u';', +- u'percentSign': u'%', +- u'nativeZeroDigit': u'0', +- u'patternDigit': u'#', +- u'plusSign': u'+', +- u'minusSign': u'-', +- u'exponential': u'E', +- u'perMille': u'\xe2\x88\x9e', +- u'infinity': u'\xef\xbf\xbd', +- u'nan': '' } ++ 'decimal': '.', ++ 'group': ',', ++ 'list': ';', ++ 'percentSign': '%', ++ 'nativeZeroDigit': '0', ++ 'patternDigit': '#', ++ 'plusSign': '+', ++ 'minusSign': '-', ++ 'exponential': 'E', ++ 'perMille': '\xe2\x88\x9e', ++ 'infinity': '\xef\xbf\xbd', ++ 'nan': '' } + self.symbols.update(symbols) + self._pattern = pattern + self._bin_pattern = None +@@ -360,7 +360,7 @@ class NumberFormat(object): + # The exponential might have a mandatory sign; remove it from the + # bin_pattern and remember the setting + exp_bin_pattern = bin_pattern[EXPONENTIAL] +- plus_sign = u'' ++ plus_sign = '' + if exp_bin_pattern.startswith('+'): + plus_sign = self.symbols['plusSign'] + exp_bin_pattern = exp_bin_pattern[1:] +@@ -444,7 +444,7 @@ class NumberFormat(object): + text += bin_pattern[PADDING4]*post_padding + + # TODO: Need to make sure unicode is everywhere +- return unicode(text) ++ return str(text) + + + +@@ -599,7 +599,7 @@ def buildDateTimeInfo(dt, calendar, pattern): + """Create the bits and pieces of the datetime object that can be put + together.""" + if isinstance(dt, datetime.time): +- dt = datetime.datetime(1969, 01, 01, dt.hour, dt.minute, dt.second, ++ dt = datetime.datetime(1969, 0o1, 0o1, dt.hour, dt.minute, dt.second, + dt.microsecond) + elif (isinstance(dt, datetime.date) and + not isinstance(dt, datetime.datetime)): +@@ -631,8 +631,8 @@ def buildDateTimeInfo(dt, calendar, pattern): + tz_name = tzinfo.tzname(dt) or 
tz_defaultname + tz_fullname = getattr(tzinfo, 'zone', None) or tz_name + +- info = {('y', 2): unicode(dt.year)[2:], +- ('y', 4): unicode(dt.year), ++ info = {('y', 2): str(dt.year)[2:], ++ ('y', 4): str(dt.year), + } + + # Generic Numbers +@@ -643,7 +643,7 @@ def buildDateTimeInfo(dt, calendar, pattern): + ('S', dt.microsecond), ('w', int(dt.strftime('%W'))), + ('W', week_in_month)): + for entry in _findFormattingCharacterInPattern(field, pattern): +- info[entry] = (u'%%.%ii' %entry[1]) %value ++ info[entry] = ('%%.%ii' %entry[1]) %value + + # am/pm marker (Text) + for entry in _findFormattingCharacterInPattern('a', pattern): +@@ -657,9 +657,9 @@ def buildDateTimeInfo(dt, calendar, pattern): + # time zone (Text) + for entry in _findFormattingCharacterInPattern('z', pattern): + if entry[1] == 1: +- info[entry] = u"%s%i%.2i" %(tz_sign, tz_hours, tz_mins) ++ info[entry] = "%s%i%.2i" %(tz_sign, tz_hours, tz_mins) + elif entry[1] == 2: +- info[entry] = u"%s%.2i:%.2i" %(tz_sign, tz_hours, tz_mins) ++ info[entry] = "%s%.2i:%.2i" %(tz_sign, tz_hours, tz_mins) + elif entry[1] == 3: + info[entry] = tz_name + else: +@@ -668,9 +668,9 @@ def buildDateTimeInfo(dt, calendar, pattern): + # month in year (Text and Number) + for entry in _findFormattingCharacterInPattern('M', pattern): + if entry[1] == 1: +- info[entry] = u'%i' %dt.month ++ info[entry] = '%i' %dt.month + elif entry[1] == 2: +- info[entry] = u'%.2i' %dt.month ++ info[entry] = '%.2i' %dt.month + elif entry[1] == 3: + info[entry] = calendar.months[dt.month][1] + else: +@@ -679,9 +679,9 @@ def buildDateTimeInfo(dt, calendar, pattern): + # day in week (Text and Number) + for entry in _findFormattingCharacterInPattern('E', pattern): + if entry[1] == 1: +- info[entry] = u'%i' %weekday ++ info[entry] = '%i' %weekday + elif entry[1] == 2: +- info[entry] = u'%.2i' %weekday ++ info[entry] = '%.2i' %weekday + elif entry[1] == 3: + info[entry] = calendar.days[dt.weekday() + 1][1] + else: +--- 
src/zope/i18n/locales/tests/test_locales.py.orig 2012-03-15 13:58:06 UTC ++++ src/zope/i18n/locales/tests/test_locales.py +@@ -35,7 +35,7 @@ class TestILocaleProvider(TestCase): + self.locales = self._makeNewProvider() + + def testInterfaceConformity(self): +- self.assert_(ILocaleProvider.providedBy(self.locales)) ++ self.assertTrue(ILocaleProvider.providedBy(self.locales)) + + def test_getLocale(self): + locale = self.locales.getLocale(None, None, None) +@@ -66,10 +66,10 @@ class TestLocaleProvider(TestILocaleProvider): + + def test_loadLocale(self): + self.locales.loadLocale(None, None, None) +- self.assertEqual(self.locales._locales.keys(), [(None, None, None)]) ++ self.assertEqual(list(self.locales._locales.keys()), [(None, None, None)]) + + self.locales.loadLocale('en', None, None) +- self.assert_(('en', None, None) in self.locales._locales.keys()) ++ self.assertTrue(('en', None, None) in list(self.locales._locales.keys())) + + def test_loadLocaleFailure(self): + self.assertRaises(LoadLocaleError, self.locales.loadLocale, 'zzz') +@@ -97,19 +97,19 @@ class TestLocaleAndProvider(TestCase): + def test_getDateFormatter(self): + formatter = self.locale.dates.getFormatter('date', 'medium') + self.assertEqual(formatter.getPattern(), 'MMM d, yyyy') +- self.assertEqual(formatter.format(datetime.date(2003, 01, 02)), ++ self.assertEqual(formatter.format(datetime.date(2003, 0o1, 0o2)), + 'Jan 2, 2003') + self.assertEqual(formatter.parse('Jan 2, 2003'), +- datetime.date(2003, 01, 02)) ++ datetime.date(2003, 0o1, 0o2)) + + def test_getDateTimeFormatter(self): + formatter = self.locale.dates.getFormatter('dateTime', 'medium') + self.assertEqual(formatter.getPattern(), 'MMM d, yyyy h:mm:ss a') + self.assertEqual( +- formatter.format(datetime.datetime(2003, 01, 02, 12, 30)), ++ formatter.format(datetime.datetime(2003, 0o1, 0o2, 12, 30)), + 'Jan 2, 2003 12:30:00 PM') + self.assertEqual(formatter.parse('Jan 2, 2003 12:30:00 PM'), +- datetime.datetime(2003, 01, 02, 12, 30)) ++ 
datetime.datetime(2003, 0o1, 0o2, 12, 30)) + + def test_getNumberFormatter(self): + formatter = self.locale.numbers.getFormatter('decimal') +@@ -124,13 +124,13 @@ class TestGlobalLocaleProvider(TestCase): + + def testLoading(self): + locales.loadLocale(None, None, None) +- self.assert_(locales._locales.has_key((None, None, None))) ++ self.assertTrue((None, None, None) in locales._locales) + locales.loadLocale('en', None, None) +- self.assert_(locales._locales.has_key(('en', None, None))) ++ self.assertTrue(('en', None, None) in locales._locales) + locales.loadLocale('en', 'US', None) +- self.assert_(locales._locales.has_key(('en', 'US', None))) ++ self.assertTrue(('en', 'US', None) in locales._locales) + locales.loadLocale('en', 'US', 'POSIX') +- self.assert_(locales._locales.has_key(('en', 'US', 'POSIX'))) ++ self.assertTrue(('en', 'US', 'POSIX') in locales._locales) + + def test_getLocale(self): + locale = locales.getLocale('en', 'GB') +--- src/zope/i18n/tests/test_formats.py.orig 2012-03-15 13:58:09 UTC ++++ src/zope/i18n/tests/test_formats.py +@@ -34,7 +34,7 @@ class LocaleStub(object): + + class LocaleCalendarStub(object): + +- type = u'gregorian' ++ type = 'gregorian' + + months = { 1: ('Januar', 'Jan'), 2: ('Februar', 'Feb'), + 3: ('Maerz', 'Mrz'), 4: ('April', 'Apr'), +@@ -59,7 +59,7 @@ class LocaleCalendarStub(object): + return [self.months.get(type, (None, None))[0] for type in range(1, 13)] + + def getMonthTypeFromName(self, name): +- for item in self.months.items(): ++ for item in list(self.months.items()): + if item[1][0] == name: + return item[0] + +@@ -67,7 +67,7 @@ class LocaleCalendarStub(object): + return [self.months.get(type, (None, None))[1] for type in range(1, 13)] + + def getMonthTypeFromAbbreviation(self, abbr): +- for item in self.months.items(): ++ for item in list(self.months.items()): + if item[1][1] == abbr: + return item[0] + +@@ -75,7 +75,7 @@ class LocaleCalendarStub(object): + return [self.days.get(type, (None, None))[0] for type 
in range(1, 8)] + + def getDayTypeFromName(self, name): +- for item in self.days.items(): ++ for item in list(self.days.items()): + if item[1][0] == name: + return item[0] + +@@ -83,7 +83,7 @@ class LocaleCalendarStub(object): + return [self.days.get(type, (None, None))[1] for type in range(1, 8)] + + def getDayTypeFromAbbreviation(self, abbr): +- for item in self.days.items(): ++ for item in list(self.days.items()): + if item[1][1] == abbr: + return item[0] + +@@ -171,14 +171,14 @@ class TestDateTimePatternParser(TestCase): + # Quote not closed + try: + parseDateTimePattern("HH' Uhr") +- except DateTimePatternParseError, err: ++ except DateTimePatternParseError as err: + self.assertEqual( + str(err), 'The quote starting at character 2 is not closed.') + # Test correct length of characters in datetime fields + try: + parseDateTimePattern("HHHHH") +- except DateTimePatternParseError, err: +- self.assert_(str(err).endswith('You have: 5')) ++ except DateTimePatternParseError as err: ++ self.assertTrue(str(err).endswith('You have: 5')) + + + class TestBuildDateTimeParseInfo(TestCase): +@@ -222,9 +222,9 @@ class TestBuildDateTimeParseInfo(TestCase): + self.assertEqual(self.info(('M', 2)), '([0-9]{2})') + + def testMonthNames(self): +- names = [u'Januar', u'Februar', u'Maerz', u'April', +- u'Mai', u'Juni', u'Juli', u'August', u'September', u'Oktober', +- u'November', u'Dezember'] ++ names = ['Januar', 'Februar', 'Maerz', 'April', ++ 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', ++ 'November', 'Dezember'] + self.assertEqual(self.info(('M', 4)), '('+'|'.join(names)+')') + + def testMonthAbbr(self): +@@ -255,26 +255,26 @@ class TestDateTimeFormat(TestCase): + format = DateTimeFormat(calendar=LocaleCalendarStub()) + + def testInterfaceConformity(self): +- self.assert_(IDateTimeFormat.providedBy(self.format)) ++ self.assertTrue(IDateTimeFormat.providedBy(self.format)) + + def testParseSimpleDateTime(self): + # German short + self.assertEqual( + 
self.format.parse('02.01.03 21:48', 'dd.MM.yy HH:mm'), +- datetime.datetime(2003, 01, 02, 21, 48)) ++ datetime.datetime(2003, 0o1, 0o2, 21, 48)) + + def testParseRealDateTime(self): + # German medium + self.assertEqual( + self.format.parse('02.01.2003 21:48:01', 'dd.MM.yyyy HH:mm:ss'), +- datetime.datetime(2003, 01, 02, 21, 48, 01)) ++ datetime.datetime(2003, 0o1, 0o2, 21, 48, 0o1)) + + # German long + # TODO: The parser does not support timezones yet. + self.assertEqual(self.format.parse( + '2. Januar 2003 21:48:01 +100', + 'd. MMMM yyyy HH:mm:ss z'), +- datetime.datetime(2003, 01, 02, 21, 48, 01, ++ datetime.datetime(2003, 0o1, 0o2, 21, 48, 0o1, + tzinfo=pytz.timezone('Europe/Berlin'))) + + # German full +@@ -282,13 +282,13 @@ class TestDateTimeFormat(TestCase): + self.assertEqual(self.format.parse( + 'Donnerstag, 2. Januar 2003 21:48 Uhr +100', + "EEEE, d. MMMM yyyy H:mm' Uhr 'z"), +- datetime.datetime(2003, 01, 02, 21, 48, ++ datetime.datetime(2003, 0o1, 0o2, 21, 48, + tzinfo=pytz.timezone('Europe/Berlin'))) + + def testParseAMPMDateTime(self): + self.assertEqual( + self.format.parse('02.01.03 09:48 nachm.', 'dd.MM.yy hh:mm a'), +- datetime.datetime(2003, 01, 02, 21, 48)) ++ datetime.datetime(2003, 0o1, 0o2, 21, 48)) + + def testParseTimeZone(self): + dt = self.format.parse('09:48 -600', 'HH:mm z') +@@ -352,28 +352,28 @@ class TestDateTimeFormat(TestCase): + def testParse12PM(self): + self.assertEqual( + self.format.parse('01.01.03 12:00 nachm.', 'dd.MM.yy hh:mm a'), +- datetime.datetime(2003, 01, 01, 12, 00, 00, 00)) ++ datetime.datetime(2003, 0o1, 0o1, 12, 00, 00, 00)) + + def testParseUnusualFormats(self): + self.assertEqual( + self.format.parse('001. Januar 03 0012:00', + 'ddd. MMMMM yy HHHH:mm'), +- datetime.datetime(2003, 01, 01, 12, 00, 00, 00)) ++ datetime.datetime(2003, 0o1, 0o1, 12, 00, 00, 00)) + self.assertEqual( + self.format.parse('0001. Jan 2003 0012:00 vorm.', + 'dddd. 
MMM yyyy hhhh:mm a'), +- datetime.datetime(2003, 01, 01, 00, 00, 00, 00)) ++ datetime.datetime(2003, 0o1, 0o1, 00, 00, 00, 00)) + + def testFormatSimpleDateTime(self): + # German short + self.assertEqual( +- self.format.format(datetime.datetime(2003, 01, 02, 21, 48), ++ self.format.format(datetime.datetime(2003, 0o1, 0o2, 21, 48), + 'dd.MM.yy HH:mm'), + '02.01.03 21:48') + + def testFormatRealDateTime(self): + tz = pytz.timezone('Europe/Berlin') +- dt = datetime.datetime(2003, 01, 02, 21, 48, 01, tzinfo=tz) ++ dt = datetime.datetime(2003, 0o1, 0o2, 21, 48, 0o1, tzinfo=tz) + # German medium + self.assertEqual( + self.format.format(dt, 'dd.MM.yyyy HH:mm:ss'), +@@ -391,47 +391,47 @@ class TestDateTimeFormat(TestCase): + + def testFormatAMPMDateTime(self): + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 21, 48), ++ datetime.datetime(2003, 0o1, 0o2, 21, 48), + 'dd.MM.yy hh:mm a'), + '02.01.03 09:48 nachm.') + + def testFormatAllWeekdays(self): + for day in range(1, 8): + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, day+5, 21, 48), ++ datetime.datetime(2003, 0o1, day+5, 21, 48), + "EEEE, d. MMMM yyyy H:mm' Uhr 'z"), + '%s, %i. 
Januar 2003 21:48 Uhr +000' %( + self.format.calendar.days[day][0], day+5)) + + def testFormatTimeZone(self): + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, 00), 'z'), ++ datetime.datetime(2003, 0o1, 0o2, 12, 00), 'z'), + '+000') + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, 00), 'zz'), ++ datetime.datetime(2003, 0o1, 0o2, 12, 00), 'zz'), + '+00:00') + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, 00), 'zzz'), ++ datetime.datetime(2003, 0o1, 0o2, 12, 00), 'zzz'), + 'UTC') + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, 00), 'zzzz'), ++ datetime.datetime(2003, 0o1, 0o2, 12, 00), 'zzzz'), + 'UTC') + tz = pytz.timezone('US/Eastern') + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, tzinfo=tz), 'z'), ++ datetime.datetime(2003, 0o1, 0o2, 12, tzinfo=tz), 'z'), + '-500') + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, tzinfo=tz), 'zz'), ++ datetime.datetime(2003, 0o1, 0o2, 12, tzinfo=tz), 'zz'), + '-05:00') + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, tzinfo=tz), 'zzz'), ++ datetime.datetime(2003, 0o1, 0o2, 12, tzinfo=tz), 'zzz'), + 'EST') + self.assertEqual(self.format.format( +- datetime.datetime(2003, 01, 02, 12, tzinfo=tz), 'zzzz'), ++ datetime.datetime(2003, 0o1, 0o2, 12, tzinfo=tz), 'zzzz'), + 'US/Eastern') + + def testFormatWeekDay(self): +- date = datetime.date(2003, 01, 02) ++ date = datetime.date(2003, 0o1, 0o2) + self.assertEqual(self.format.format(date, "E"), + '4') + self.assertEqual(self.format.format(date, "EE"), +@@ -455,7 +455,7 @@ class TestDateTimeFormat(TestCase): + '05') + + def testFormatDayOfWeekInMonth(self): +- date = datetime.date(2003, 01, 02) ++ date = datetime.date(2003, 0o1, 0o2) + self.assertEqual(self.format.format(date, "F"), + '1') + self.assertEqual(self.format.format(date, "FF"), +@@ -526,11 +526,11 @@ class 
TestDateTimeFormat(TestCase): + + def testFormatSimpleHourRepresentation(self): + self.assertEqual( +- self.format.format(datetime.datetime(2003, 01, 02, 23, 00), ++ self.format.format(datetime.datetime(2003, 0o1, 0o2, 23, 00), + 'dd.MM.yy h:mm:ss a'), + '02.01.03 11:00:00 nachm.') + self.assertEqual( +- self.format.format(datetime.datetime(2003, 01, 02, 02, 00), ++ self.format.format(datetime.datetime(2003, 0o1, 0o2, 0o2, 00), + 'dd.MM.yy h:mm:ss a'), + '02.01.03 2:00:00 vorm.') + self.assertEqual( +@@ -549,54 +549,54 @@ class TestDateTimeFormat(TestCase): + def testFormatDayInYear(self): + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'D'), +- u'3') ++ '3') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'DD'), +- u'03') ++ '03') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'DDD'), +- u'003') ++ '003') + self.assertEqual( + self.format.format(datetime.date(2003, 12, 31), 'D'), +- u'365') ++ '365') + self.assertEqual( + self.format.format(datetime.date(2003, 12, 31), 'DD'), +- u'365') ++ '365') + self.assertEqual( + self.format.format(datetime.date(2003, 12, 31), 'DDD'), +- u'365') ++ '365') + self.assertEqual( + self.format.format(datetime.date(2004, 12, 31), 'DDD'), +- u'366') ++ '366') + + def testFormatDayOfWeekInMOnth(self): + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'F'), +- u'1') ++ '1') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 10), 'F'), +- u'2') ++ '2') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 17), 'F'), +- u'3') ++ '3') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 24), 'F'), +- u'4') ++ '4') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 31), 'F'), +- u'5') ++ '5') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 6), 'F'), +- u'1') ++ '1') + + def testFormatUnusualFormats(self): + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'DDD-yyyy'), +- 
u'003-2003') ++ '003-2003') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 10), + "F. EEEE 'im' MMMM, yyyy"), +- u'2. Freitag im Januar, 2003') ++ '2. Freitag im Januar, 2003') + + + +@@ -828,7 +828,7 @@ class TestNumberFormat(TestCase): + 'infinity': 'oo', 'nan': 'N/A'}) + + def testInterfaceConformity(self): +- self.assert_(INumberFormat.providedBy(self.format)) ++ self.assertTrue(INumberFormat.providedBy(self.format)) + + def testParseSimpleInteger(self): + self.assertEqual(self.format.parse('23341', '###0'), diff --git a/devel/py-zope.processlifetime/Makefile b/devel/py-zope.processlifetime/Makefile index 103f2245363..d37c9df0639 100644 --- a/devel/py-zope.processlifetime/Makefile +++ b/devel/py-zope.processlifetime/Makefile @@ -16,4 +16,6 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}zope.interface>=0:devel/py-zope.interface@${ USES= python:3.6+ USE_PYTHON= distutils autoplist +NO_ARCH= yes + .include diff --git a/devel/py-zope.sequencesort/files/patch-2to3 b/devel/py-zope.sequencesort/files/patch-2to3 new file mode 100644 index 00000000000..35377d9a736 --- /dev/null +++ b/devel/py-zope.sequencesort/files/patch-2to3 @@ -0,0 +1,45 @@ +--- src/zope/sequencesort/ssort.py.orig 2007-10-03 03:58:37 UTC ++++ src/zope/sequencesort/ssort.py +@@ -66,13 +66,13 @@ def sort(sequence, sort=(), _=None, mapping=0): + + # clean the mess a bit + if multsort: # More than one sort key. +- sortfields = map(lambda x: x[0], sf_list) ++ sortfields = [x[0] for x in sf_list] + else: + sort = sf_list[0][0] + + elif sort: + if multsort: # More than one sort key. 
+- sortfields = map(lambda x: x[0], sort) ++ sortfields = [x[0] for x in sort] + else: + sort = sort[0][0] + +@@ -133,7 +133,7 @@ def nocase(str1, str2): + return cmp(str1.lower(), str2.lower()) + + import sys +-if sys.modules.has_key("locale"): # only if locale is already imported ++if "locale" in sys.modules: # only if locale is already imported + from locale import strcoll + + def strcoll_nocase(str1, str2): +@@ -157,7 +157,7 @@ def make_sortfunctions(sortfields, _): + elif l == 3: + pass + else: +- raise SyntaxError, "sort option must contains no more than 2 fields" ++ raise SyntaxError("sort option must contains no more than 2 fields") + + f_name = f[1] + +@@ -184,7 +184,7 @@ def make_sortfunctions(sortfields, _): + elif sort_order == "desc": + multiplier = -1 + else: +- raise SyntaxError, "sort direction must be either ASC or DESC" ++ raise SyntaxError("sort direction must be either ASC or DESC") + + sf_list.append((f[0], func, multiplier)) + diff --git a/devel/py-zope.size/files/patch-2to3 b/devel/py-zope.size/files/patch-2to3 new file mode 100644 index 00000000000..ea7cb7be636 --- /dev/null +++ b/devel/py-zope.size/files/patch-2to3 @@ -0,0 +1,78 @@ +--- src/zope/size/tests.py.orig 2011-11-29 18:28:40 UTC ++++ src/zope/size/tests.py +@@ -26,7 +26,7 @@ class ZCMLTest(unittest.TestCase): + try: + zope.configuration.xmlconfig.XMLConfig( + 'configure.zcml', zope.size)() +- except Exception, e: ++ except Exception as e: + self.fail(e) + + def test_configure_should_register_n_components(self): +@@ -58,54 +58,54 @@ class Test(unittest.TestCase): + def testImplementsISized(self): + from zope.size import DefaultSized + sized = DefaultSized(object()) +- self.assert_(ISized.providedBy(sized)) ++ self.assertTrue(ISized.providedBy(sized)) + + def testSizeWithBytes(self): + from zope.size import DefaultSized + obj = DummyObject(1023) + sized = DefaultSized(obj) + self.assertEqual(sized.sizeForSorting(), ('byte', 1023)) +- self.assertEqual(sized.sizeForDisplay(), u'1 KB') 
++ self.assertEqual(sized.sizeForDisplay(), '1 KB') + + def testSizeWithNone(self): + from zope.size import DefaultSized + obj = DummyObject(None) + sized = DefaultSized(obj) + self.assertEqual(sized.sizeForSorting(), (None, None)) +- self.assertEqual(sized.sizeForDisplay(), u'not-available') ++ self.assertEqual(sized.sizeForDisplay(), 'not-available') + + def testSizeNotAvailable(self): + from zope.size import DefaultSized + sized = DefaultSized(object()) + self.assertEqual(sized.sizeForSorting(), (None, None)) +- self.assertEqual(sized.sizeForDisplay(), u'not-available') ++ self.assertEqual(sized.sizeForDisplay(), 'not-available') + + def testVariousSizes(self): + from zope.size import DefaultSized + + sized = DefaultSized(DummyObject(0)) + self.assertEqual(sized.sizeForSorting(), ('byte', 0)) +- self.assertEqual(sized.sizeForDisplay(), u'0 KB') ++ self.assertEqual(sized.sizeForDisplay(), '0 KB') + + sized = DefaultSized(DummyObject(1)) + self.assertEqual(sized.sizeForSorting(), ('byte', 1)) +- self.assertEqual(sized.sizeForDisplay(), u'1 KB') ++ self.assertEqual(sized.sizeForDisplay(), '1 KB') + + sized = DefaultSized(DummyObject(2048)) + self.assertEqual(sized.sizeForSorting(), ('byte', 2048)) +- self.assertEqual(sized.sizeForDisplay(), u'${size} KB') ++ self.assertEqual(sized.sizeForDisplay(), '${size} KB') + self.assertEqual(sized.sizeForDisplay().mapping, {'size': '2'}) + + sized = DefaultSized(DummyObject(2000000)) + self.assertEqual(sized.sizeForSorting(), ('byte', 2000000)) +- self.assertEqual(sized.sizeForDisplay(), u'${size} MB') ++ self.assertEqual(sized.sizeForDisplay(), '${size} MB') + self.assertEqual(sized.sizeForDisplay().mapping, {'size': '1.91'}) + + def test_byteDisplay(self): + from zope.size import byteDisplay +- self.assertEqual(byteDisplay(0), u'0 KB') +- self.assertEqual(byteDisplay(1), u'1 KB') +- self.assertEqual(byteDisplay(2048), u'${size} KB') ++ self.assertEqual(byteDisplay(0), '0 KB') ++ self.assertEqual(byteDisplay(1), '1 KB') ++ 
self.assertEqual(byteDisplay(2048), '${size} KB') + self.assertEqual(byteDisplay(2048).mapping, {'size': '2'}) +- self.assertEqual(byteDisplay(2000000), u'${size} MB') ++ self.assertEqual(byteDisplay(2000000), '${size} MB') + self.assertEqual(byteDisplay(2000000).mapping, {'size': '1.91'}) diff --git a/devel/qscintilla2-qt5/distinfo b/devel/qscintilla2-qt5/distinfo index 4fda184347f..9bcd3b77c29 100644 --- a/devel/qscintilla2-qt5/distinfo +++ b/devel/qscintilla2-qt5/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643274895 -SHA256 (QScintilla_src-2.13.1.tar.gz) = 800e3d2071a96bcccd7581346af0d2fe28fc30cd68530cb8302685d013afd54a -SIZE (QScintilla_src-2.13.1.tar.gz) = 3059819 +TIMESTAMP = 1648212625 +SHA256 (QScintilla_src-2.13.2.tar.gz) = b6c7e5f27b51d25f09fe6cf84ae9a7f0876af0d65d8ccb551109e6e7b25885f4 +SIZE (QScintilla_src-2.13.2.tar.gz) = 3059999 diff --git a/devel/regexxer/Makefile b/devel/regexxer/Makefile index 5e6b29dc458..a13b3bab0ae 100644 --- a/devel/regexxer/Makefile +++ b/devel/regexxer/Makefile @@ -2,7 +2,7 @@ PORTNAME= regexxer PORTVERSION= 0.10 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= devel textproc gnome MASTER_SITES= GNOME diff --git a/devel/rkcommon/Makefile b/devel/rkcommon/Makefile index 2752a033e40..eb1f0d1f728 100644 --- a/devel/rkcommon/Makefile +++ b/devel/rkcommon/Makefile @@ -23,4 +23,10 @@ CMAKE_TESTING_ON= BUILD_TESTING PLIST_SUB= PORTVERSION=${PORTVERSION} +.include + +.if ${ARCH} != aarch64 && ${ARCH} != amd64 && !${ARCH:Marmv?} && ${ARCH} != i386 +CMAKE_ARGS+= -DRKCOMMON_NO_SIMD:BOOL=ON +.endif + .include diff --git a/devel/rubygem-actionview52/Makefile b/devel/rubygem-actionview52/Makefile index 776cc4c9cc7..c8f1516c911 100644 --- a/devel/rubygem-actionview52/Makefile +++ b/devel/rubygem-actionview52/Makefile @@ -1,7 +1,7 @@ # Created by: Steve Wills PORTNAME= actionview -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/devel/rubygem-actionview52/distinfo 
b/devel/rubygem-actionview52/distinfo index 81a109a9a91..663654dcf70 100644 --- a/devel/rubygem-actionview52/distinfo +++ b/devel/rubygem-actionview52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298698 -SHA256 (rubygem/actionview-5.2.6.gem) = ef2f9ca0ab876e890d5508da547bef6e7248f16e89c42bd35510d33b4510c819 -SIZE (rubygem/actionview-5.2.6.gem) = 163328 +TIMESTAMP = 1647264890 +SHA256 (rubygem/actionview-5.2.7.gem) = f38a5c2098cd553e83ec12892f28c41cf419143c29c7112acb541ea09e3fcef5 +SIZE (rubygem/actionview-5.2.7.gem) = 163328 diff --git a/devel/rubygem-actionview60/Makefile b/devel/rubygem-actionview60/Makefile index f0c69f063c1..4141b39726d 100644 --- a/devel/rubygem-actionview60/Makefile +++ b/devel/rubygem-actionview60/Makefile @@ -1,7 +1,7 @@ # Created by: Steve Wills PORTNAME= actionview -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/devel/rubygem-actionview60/distinfo b/devel/rubygem-actionview60/distinfo index c2975e84df6..887f9fe5668 100644 --- a/devel/rubygem-actionview60/distinfo +++ b/devel/rubygem-actionview60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058906 -SHA256 (rubygem/actionview-6.0.4.6.gem) = 81941c1e0bdf04448a775b5f1a6467bd50bae0c190f8481f3e8b0d0fc0fe2b82 -SIZE (rubygem/actionview-6.0.4.6.gem) = 169984 +TIMESTAMP = 1647264912 +SHA256 (rubygem/actionview-6.0.4.7.gem) = a1492dc0544d0959433d87f37cb621c7455146733009e91e827416b1555e4cbe +SIZE (rubygem/actionview-6.0.4.7.gem) = 169984 diff --git a/devel/rubygem-actionview61/Makefile b/devel/rubygem-actionview61/Makefile index 1ebb855a70a..36b2b34026b 100644 --- a/devel/rubygem-actionview61/Makefile +++ b/devel/rubygem-actionview61/Makefile @@ -1,7 +1,7 @@ # Created by: Steve Wills PORTNAME= actionview -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/devel/rubygem-actionview61/distinfo b/devel/rubygem-actionview61/distinfo index 7e30a24a713..ef8934ef306 100644 --- 
a/devel/rubygem-actionview61/distinfo +++ b/devel/rubygem-actionview61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058932 -SHA256 (rubygem/actionview-6.1.4.6.gem) = 73d24fa9b9a90adcb05060cb506e467afeea49c605aae35333f59ef70062bd32 -SIZE (rubygem/actionview-6.1.4.6.gem) = 171520 +TIMESTAMP = 1647264938 +SHA256 (rubygem/actionview-6.1.4.7.gem) = 4cfd1cb5f11675cf710bc062bbf113e271658a425fb6bb532ef2c1257067a80b +SIZE (rubygem/actionview-6.1.4.7.gem) = 171520 diff --git a/devel/rubygem-actionview70/Makefile b/devel/rubygem-actionview70/Makefile index 42e3a2efb01..f43057f843a 100644 --- a/devel/rubygem-actionview70/Makefile +++ b/devel/rubygem-actionview70/Makefile @@ -1,5 +1,5 @@ PORTNAME= actionview -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/devel/rubygem-actionview70/distinfo b/devel/rubygem-actionview70/distinfo index 53adf7fcb43..74cdded1ba8 100644 --- a/devel/rubygem-actionview70/distinfo +++ b/devel/rubygem-actionview70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058958 -SHA256 (rubygem/actionview-7.0.2.gem) = 097077cab8eff144e1cddaae8316baf0a3abd6d987e0e8621749ef1672252cd4 -SIZE (rubygem/actionview-7.0.2.gem) = 178176 +TIMESTAMP = 1647264964 +SHA256 (rubygem/actionview-7.0.2.3.gem) = 109000cd4a3dbfe5a05420584a44ccd89ed32a616642baf07696ca24298e8dda +SIZE (rubygem/actionview-7.0.2.3.gem) = 178176 diff --git a/devel/rubygem-activejob52/Makefile b/devel/rubygem-activejob52/Makefile index 803de2cae03..f8e74684e00 100644 --- a/devel/rubygem-activejob52/Makefile +++ b/devel/rubygem-activejob52/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Jost Meixner PORTNAME= activejob -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/devel/rubygem-activejob52/distinfo b/devel/rubygem-activejob52/distinfo index 6e74ea8bd36..80260b94891 100644 --- a/devel/rubygem-activejob52/distinfo +++ b/devel/rubygem-activejob52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 
1620298700 -SHA256 (rubygem/activejob-5.2.6.gem) = cdf6435547ede7d2a7390f0e92f703421984e2794cb8b7086c8ce8f5d7ecbbb1 -SIZE (rubygem/activejob-5.2.6.gem) = 26624 +TIMESTAMP = 1647264892 +SHA256 (rubygem/activejob-5.2.7.gem) = b5e576fa3814e72d9653edaee5285dd0045be02bed784e2ad19980a158c3d0fb +SIZE (rubygem/activejob-5.2.7.gem) = 26624 diff --git a/devel/rubygem-activejob60/Makefile b/devel/rubygem-activejob60/Makefile index fe49ab4db41..54358c87e2c 100644 --- a/devel/rubygem-activejob60/Makefile +++ b/devel/rubygem-activejob60/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Jost Meixner PORTNAME= activejob -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/devel/rubygem-activejob60/distinfo b/devel/rubygem-activejob60/distinfo index bdfa21d2371..9411da088e1 100644 --- a/devel/rubygem-activejob60/distinfo +++ b/devel/rubygem-activejob60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058908 -SHA256 (rubygem/activejob-6.0.4.6.gem) = c27514ef9525227e6ae1d87c24bd87b802da99613dcc85cc2996491f608cad71 -SIZE (rubygem/activejob-6.0.4.6.gem) = 31744 +TIMESTAMP = 1647264914 +SHA256 (rubygem/activejob-6.0.4.7.gem) = fb99fbffe6829e52b1dde7231d972c368fd5f69a7eeb9395a6c68b3328046edf +SIZE (rubygem/activejob-6.0.4.7.gem) = 31744 diff --git a/devel/rubygem-activejob61/Makefile b/devel/rubygem-activejob61/Makefile index df390341dfa..16af87423d9 100644 --- a/devel/rubygem-activejob61/Makefile +++ b/devel/rubygem-activejob61/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Jost Meixner PORTNAME= activejob -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/devel/rubygem-activejob61/distinfo b/devel/rubygem-activejob61/distinfo index effb2d8e58e..c0855534cdd 100644 --- a/devel/rubygem-activejob61/distinfo +++ b/devel/rubygem-activejob61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058934 -SHA256 (rubygem/activejob-6.1.4.6.gem) = 
34f4d2946a48ee78c1742c0475cd4fc0b6d35dbe768c839345e9f2574dc8b609 -SIZE (rubygem/activejob-6.1.4.6.gem) = 32768 +TIMESTAMP = 1647264940 +SHA256 (rubygem/activejob-6.1.4.7.gem) = 72661071c075ab7c9515f33a458338ecd5cf57102a3a289dd4631ef965bf0dbd +SIZE (rubygem/activejob-6.1.4.7.gem) = 32768 diff --git a/devel/rubygem-activejob70/Makefile b/devel/rubygem-activejob70/Makefile index 0d002463003..cf871f9b650 100644 --- a/devel/rubygem-activejob70/Makefile +++ b/devel/rubygem-activejob70/Makefile @@ -1,5 +1,5 @@ PORTNAME= activejob -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/devel/rubygem-activejob70/distinfo b/devel/rubygem-activejob70/distinfo index 485ebebb65f..c137b43871f 100644 --- a/devel/rubygem-activejob70/distinfo +++ b/devel/rubygem-activejob70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058960 -SHA256 (rubygem/activejob-7.0.2.gem) = 3f7d3209b9f376bdaa4bda683bbc9544629812876c3244fdc870b0ca38f10089 -SIZE (rubygem/activejob-7.0.2.gem) = 32768 +TIMESTAMP = 1647264966 +SHA256 (rubygem/activejob-7.0.2.3.gem) = d39cf8a3d3bd984363d1a8aeb580e7c2967f8c4f579997d2afe7b20ef78fcc8b +SIZE (rubygem/activejob-7.0.2.3.gem) = 32768 diff --git a/devel/rubygem-activesupport52/Makefile b/devel/rubygem-activesupport52/Makefile index 820befe5e69..7d182084a26 100644 --- a/devel/rubygem-activesupport52/Makefile +++ b/devel/rubygem-activesupport52/Makefile @@ -1,7 +1,7 @@ # Created by: Jonathan Weiss () PORTNAME= activesupport -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/devel/rubygem-activesupport52/distinfo b/devel/rubygem-activesupport52/distinfo index de3e095707f..a56ed785515 100644 --- a/devel/rubygem-activesupport52/distinfo +++ b/devel/rubygem-activesupport52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298702 -SHA256 (rubygem/activesupport-5.2.6.gem) = 7249ee13859fc99ed2c833048674fd28c11605c679f1c65035190a2219e9cbef -SIZE 
(rubygem/activesupport-5.2.6.gem) = 381952 +TIMESTAMP = 1647264894 +SHA256 (rubygem/activesupport-5.2.7.gem) = dde769b17edc4c2402c940a4cb1b6efc3eb046ffef42da606452d36e6a9bde14 +SIZE (rubygem/activesupport-5.2.7.gem) = 382464 diff --git a/devel/rubygem-activesupport60/Makefile b/devel/rubygem-activesupport60/Makefile index 399194bd3fe..a7fdbff7f08 100644 --- a/devel/rubygem-activesupport60/Makefile +++ b/devel/rubygem-activesupport60/Makefile @@ -1,7 +1,7 @@ # Created by: Jonathan Weiss () PORTNAME= activesupport -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/devel/rubygem-activesupport60/distinfo b/devel/rubygem-activesupport60/distinfo index 236fab49b59..f24ff128014 100644 --- a/devel/rubygem-activesupport60/distinfo +++ b/devel/rubygem-activesupport60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058910 -SHA256 (rubygem/activesupport-6.0.4.6.gem) = 22370157cdebd0d4793cb99e31528d1173bb10b877726568471b581580ba12a3 -SIZE (rubygem/activesupport-6.0.4.6.gem) = 214528 +TIMESTAMP = 1647264916 +SHA256 (rubygem/activesupport-6.0.4.7.gem) = 0788197cd50ad04e6a8270bf68afead5214e68eabde646f0cf1980488f78fc96 +SIZE (rubygem/activesupport-6.0.4.7.gem) = 214528 diff --git a/devel/rubygem-activesupport61/Makefile b/devel/rubygem-activesupport61/Makefile index 9898d322eae..1af325b5bed 100644 --- a/devel/rubygem-activesupport61/Makefile +++ b/devel/rubygem-activesupport61/Makefile @@ -1,7 +1,7 @@ # Created by: Jonathan Weiss () PORTNAME= activesupport -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/devel/rubygem-activesupport61/distinfo b/devel/rubygem-activesupport61/distinfo index 89f5071c2e7..d136d62e4df 100644 --- a/devel/rubygem-activesupport61/distinfo +++ b/devel/rubygem-activesupport61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058936 -SHA256 (rubygem/activesupport-6.1.4.6.gem) = 
65340561cbff04e794a13f219bd017e165d020fca84d3ebdc8f9918fdf263f6f -SIZE (rubygem/activesupport-6.1.4.6.gem) = 219648 +TIMESTAMP = 1647264942 +SHA256 (rubygem/activesupport-6.1.4.7.gem) = 896fe65a494306c35371d57a398d3e6073ad4ab6228ccca5f8ac2671f6634912 +SIZE (rubygem/activesupport-6.1.4.7.gem) = 219648 diff --git a/devel/rubygem-activesupport70/Makefile b/devel/rubygem-activesupport70/Makefile index bb04c73cd46..a529218128a 100644 --- a/devel/rubygem-activesupport70/Makefile +++ b/devel/rubygem-activesupport70/Makefile @@ -1,5 +1,5 @@ PORTNAME= activesupport -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/devel/rubygem-activesupport70/distinfo b/devel/rubygem-activesupport70/distinfo index 6a97930e379..6f0b426f026 100644 --- a/devel/rubygem-activesupport70/distinfo +++ b/devel/rubygem-activesupport70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058962 -SHA256 (rubygem/activesupport-7.0.2.gem) = a1c24e125460325933b71b6287462dda6d7b60984436d690943cfcc94506557c -SIZE (rubygem/activesupport-7.0.2.gem) = 222208 +TIMESTAMP = 1647264968 +SHA256 (rubygem/activesupport-7.0.2.3.gem) = 28ba7f01792bd3fb135ef8618199799441568f89cc3890c78b0e4812486bfeca +SIZE (rubygem/activesupport-7.0.2.3.gem) = 222208 diff --git a/devel/rubygem-async-io/Makefile b/devel/rubygem-async-io/Makefile index c7db2afbe19..25bbeb75dae 100644 --- a/devel/rubygem-async-io/Makefile +++ b/devel/rubygem-async-io/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= async-io -PORTVERSION= 1.32.2 +PORTVERSION= 1.33.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-async-io/distinfo b/devel/rubygem-async-io/distinfo index 49c25096916..d27ae0477b5 100644 --- a/devel/rubygem-async-io/distinfo +++ b/devel/rubygem-async-io/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1626542094 -SHA256 (rubygem/async-io-1.32.2.gem) = 088624d8d79723179d373ce5e8a7b423742969a6ca55c2ba527a3b273aa88982 -SIZE (rubygem/async-io-1.32.2.gem) = 19456 
+TIMESTAMP = 1647264722 +SHA256 (rubygem/async-io-1.33.0.gem) = b4bf9f135ba1e2844a6bae51cbfd41c074b62604b56792658235000978c7c59a +SIZE (rubygem/async-io-1.33.0.gem) = 19456 diff --git a/devel/rubygem-aws-partitions/Makefile b/devel/rubygem-aws-partitions/Makefile index 0e6180851c7..0c6ddc63899 100644 --- a/devel/rubygem-aws-partitions/Makefile +++ b/devel/rubygem-aws-partitions/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-partitions -PORTVERSION= 1.559.0 +PORTVERSION= 1.566.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-partitions/distinfo b/devel/rubygem-aws-partitions/distinfo index b5e924f5d27..c3c2e00e421 100644 --- a/devel/rubygem-aws-partitions/distinfo +++ b/devel/rubygem-aws-partitions/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058186 -SHA256 (rubygem/aws-partitions-1.559.0.gem) = dc1d99426ffb9f15c28fc24b56aaa3db9ed29d1017f6534e12088272f911f196 -SIZE (rubygem/aws-partitions-1.559.0.gem) = 46080 +TIMESTAMP = 1647264724 +SHA256 (rubygem/aws-partitions-1.566.0.gem) = 8907f8ba0453028af1bb892760aafb57d3754791e32d632e65032fff01a8c95f +SIZE (rubygem/aws-partitions-1.566.0.gem) = 46080 diff --git a/devel/rubygem-aws-sdk-amplify/Makefile b/devel/rubygem-aws-sdk-amplify/Makefile index ebf76711256..11ccb224524 100644 --- a/devel/rubygem-aws-sdk-amplify/Makefile +++ b/devel/rubygem-aws-sdk-amplify/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-amplify -PORTVERSION= 1.39.0 +PORTVERSION= 1.40.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-amplify/distinfo b/devel/rubygem-aws-sdk-amplify/distinfo index 4c4094068f5..009c3a4ae07 100644 --- a/devel/rubygem-aws-sdk-amplify/distinfo +++ b/devel/rubygem-aws-sdk-amplify/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058198 -SHA256 (rubygem/aws-sdk-amplify-1.39.0.gem) = b10e6ac0c7cbf4fdaa6141f84df0543e694629fac35da884c93743ab022f9eca -SIZE (rubygem/aws-sdk-amplify-1.39.0.gem) = 44032 +TIMESTAMP = 1647264726 +SHA256 
(rubygem/aws-sdk-amplify-1.40.0.gem) = 5723c03597815c7f51e07120dff3d4939c375d4b24d2c7606ea276da50c3e1b0 +SIZE (rubygem/aws-sdk-amplify-1.40.0.gem) = 44032 diff --git a/devel/rubygem-aws-sdk-amplifyuibuilder/Makefile b/devel/rubygem-aws-sdk-amplifyuibuilder/Makefile index 42f4c5ee589..1a6d84dcbb4 100644 --- a/devel/rubygem-aws-sdk-amplifyuibuilder/Makefile +++ b/devel/rubygem-aws-sdk-amplifyuibuilder/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-amplifyuibuilder -PORTVERSION= 1.3.0 +PORTVERSION= 1.4.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-amplifyuibuilder/distinfo b/devel/rubygem-aws-sdk-amplifyuibuilder/distinfo index e4fc3465a4f..eba7fa8366b 100644 --- a/devel/rubygem-aws-sdk-amplifyuibuilder/distinfo +++ b/devel/rubygem-aws-sdk-amplifyuibuilder/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058202 -SHA256 (rubygem/aws-sdk-amplifyuibuilder-1.3.0.gem) = fe4b6a32e021ea63af442c640e24af77e3ad7a43364b72e834faf68571cd6a3d -SIZE (rubygem/aws-sdk-amplifyuibuilder-1.3.0.gem) = 33280 +TIMESTAMP = 1647264728 +SHA256 (rubygem/aws-sdk-amplifyuibuilder-1.4.0.gem) = 8877074bc514329e121b7279776fbdce780bea576d07c3d465ac273c940f2d66 +SIZE (rubygem/aws-sdk-amplifyuibuilder-1.4.0.gem) = 52736 diff --git a/devel/rubygem-aws-sdk-appflow/Makefile b/devel/rubygem-aws-sdk-appflow/Makefile index 030d7b22c72..32c51136531 100644 --- a/devel/rubygem-aws-sdk-appflow/Makefile +++ b/devel/rubygem-aws-sdk-appflow/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-appflow -PORTVERSION= 1.24.0 +PORTVERSION= 1.25.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-appflow/distinfo b/devel/rubygem-aws-sdk-appflow/distinfo index aff8f07f0da..8c5bdf7f8b0 100644 --- a/devel/rubygem-aws-sdk-appflow/distinfo +++ b/devel/rubygem-aws-sdk-appflow/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058214 -SHA256 (rubygem/aws-sdk-appflow-1.24.0.gem) = 
a6e9361690e3b20ad7e918203b0dad339392cacee5882c6499d1dc7250481767 -SIZE (rubygem/aws-sdk-appflow-1.24.0.gem) = 74240 +TIMESTAMP = 1647264730 +SHA256 (rubygem/aws-sdk-appflow-1.25.0.gem) = 6f2fec1c881c19549e6014846035ac5c626976e741862ff0e0c918461467c8a1 +SIZE (rubygem/aws-sdk-appflow-1.25.0.gem) = 74240 diff --git a/devel/rubygem-aws-sdk-appregistry/Makefile b/devel/rubygem-aws-sdk-appregistry/Makefile index c41c5b99108..19a56b39f8f 100644 --- a/devel/rubygem-aws-sdk-appregistry/Makefile +++ b/devel/rubygem-aws-sdk-appregistry/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-appregistry -PORTVERSION= 1.14.0 +PORTVERSION= 1.15.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-appregistry/distinfo b/devel/rubygem-aws-sdk-appregistry/distinfo index 1230d5146a8..9cbdfa80e5a 100644 --- a/devel/rubygem-aws-sdk-appregistry/distinfo +++ b/devel/rubygem-aws-sdk-appregistry/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058228 -SHA256 (rubygem/aws-sdk-appregistry-1.14.0.gem) = 8f780d44f7977ef182bbf33b44ac79f6076648138c11e7d77c06ad1650f60697 -SIZE (rubygem/aws-sdk-appregistry-1.14.0.gem) = 26112 +TIMESTAMP = 1647264732 +SHA256 (rubygem/aws-sdk-appregistry-1.15.0.gem) = dd94da8ff306dc9b0ee5a68393b926b8dc03625a4be0e2822ac9422eaf82697a +SIZE (rubygem/aws-sdk-appregistry-1.15.0.gem) = 26624 diff --git a/devel/rubygem-aws-sdk-athena/Makefile b/devel/rubygem-aws-sdk-athena/Makefile index 51241780f1e..a28a359f97a 100644 --- a/devel/rubygem-aws-sdk-athena/Makefile +++ b/devel/rubygem-aws-sdk-athena/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-athena -PORTVERSION= 1.50.0 +PORTVERSION= 1.52.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-athena/distinfo b/devel/rubygem-aws-sdk-athena/distinfo index 9024c26b66f..32a4ad2eab4 100644 --- a/devel/rubygem-aws-sdk-athena/distinfo +++ b/devel/rubygem-aws-sdk-athena/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058236 -SHA256 
(rubygem/aws-sdk-athena-1.50.0.gem) = 281dc81fb8f0776237e736a287229e4484f4a8af20c507a03fc009f3dba8ed59 -SIZE (rubygem/aws-sdk-athena-1.50.0.gem) = 46592 +TIMESTAMP = 1647264734 +SHA256 (rubygem/aws-sdk-athena-1.52.0.gem) = d98f8dafb48553d8b5a03a2d47c4e0bc2e1fa220c31327608cc8a1dd830d9cfe +SIZE (rubygem/aws-sdk-athena-1.52.0.gem) = 48128 diff --git a/devel/rubygem-aws-sdk-chime/Makefile b/devel/rubygem-aws-sdk-chime/Makefile index db55ec93d45..d617fec65b8 100644 --- a/devel/rubygem-aws-sdk-chime/Makefile +++ b/devel/rubygem-aws-sdk-chime/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-chime -PORTVERSION= 1.66.0 +PORTVERSION= 1.67.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-chime/distinfo b/devel/rubygem-aws-sdk-chime/distinfo index d8af3a0210e..1464335f8b5 100644 --- a/devel/rubygem-aws-sdk-chime/distinfo +++ b/devel/rubygem-aws-sdk-chime/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058256 -SHA256 (rubygem/aws-sdk-chime-1.66.0.gem) = dc7685b75d572232bd7c7e45bfb2addca48b4167f452ff3380feff6ecb2b11ab -SIZE (rubygem/aws-sdk-chime-1.66.0.gem) = 120320 +TIMESTAMP = 1647264736 +SHA256 (rubygem/aws-sdk-chime-1.67.0.gem) = da76563b47645fc344fdcb687926e5a9e3c4526c22dc6d3f01d1344e3d8ece48 +SIZE (rubygem/aws-sdk-chime-1.67.0.gem) = 120832 diff --git a/devel/rubygem-aws-sdk-chimesdkmeetings/Makefile b/devel/rubygem-aws-sdk-chimesdkmeetings/Makefile index f700169d5d3..539268b2f82 100644 --- a/devel/rubygem-aws-sdk-chimesdkmeetings/Makefile +++ b/devel/rubygem-aws-sdk-chimesdkmeetings/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-chimesdkmeetings -PORTVERSION= 1.7.0 +PORTVERSION= 1.8.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-chimesdkmeetings/distinfo b/devel/rubygem-aws-sdk-chimesdkmeetings/distinfo index 288ca443064..c84a9010881 100644 --- a/devel/rubygem-aws-sdk-chimesdkmeetings/distinfo +++ b/devel/rubygem-aws-sdk-chimesdkmeetings/distinfo @@ -1,3 +1,3 @@ 
-TIMESTAMP = 1646058260 -SHA256 (rubygem/aws-sdk-chimesdkmeetings-1.7.0.gem) = bed74fcaa1c40ee1f5082d6dd0475961933d76fa65460b0c659f6e2941cc8397 -SIZE (rubygem/aws-sdk-chimesdkmeetings-1.7.0.gem) = 26112 +TIMESTAMP = 1647264738 +SHA256 (rubygem/aws-sdk-chimesdkmeetings-1.8.0.gem) = 0627a62c25368c4f81bf477286ce7088a5a59beb6eaeec646053c00d517aba29 +SIZE (rubygem/aws-sdk-chimesdkmeetings-1.8.0.gem) = 26112 diff --git a/devel/rubygem-aws-sdk-cloudtrail/Makefile b/devel/rubygem-aws-sdk-cloudtrail/Makefile index daf83f663fe..8cd95774cc2 100644 --- a/devel/rubygem-aws-sdk-cloudtrail/Makefile +++ b/devel/rubygem-aws-sdk-cloudtrail/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-cloudtrail -PORTVERSION= 1.47.0 +PORTVERSION= 1.48.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-cloudtrail/distinfo b/devel/rubygem-aws-sdk-cloudtrail/distinfo index 6554417ee92..e5a48f094e1 100644 --- a/devel/rubygem-aws-sdk-cloudtrail/distinfo +++ b/devel/rubygem-aws-sdk-cloudtrail/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058282 -SHA256 (rubygem/aws-sdk-cloudtrail-1.47.0.gem) = db9afc42c7ef8d864e5df3f38a82fb25553593fbf7316b5efde400d2fe2a782f -SIZE (rubygem/aws-sdk-cloudtrail-1.47.0.gem) = 59392 +TIMESTAMP = 1647264740 +SHA256 (rubygem/aws-sdk-cloudtrail-1.48.0.gem) = acdb92b590ef07c6c842004dfb19f180af137982031974c8892c98dda4bab125 +SIZE (rubygem/aws-sdk-cloudtrail-1.48.0.gem) = 59904 diff --git a/devel/rubygem-aws-sdk-comprehend/Makefile b/devel/rubygem-aws-sdk-comprehend/Makefile index b41d46c4aa9..4a70c60c12d 100644 --- a/devel/rubygem-aws-sdk-comprehend/Makefile +++ b/devel/rubygem-aws-sdk-comprehend/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-comprehend -PORTVERSION= 1.59.0 +PORTVERSION= 1.60.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-comprehend/distinfo b/devel/rubygem-aws-sdk-comprehend/distinfo index 291d7b84713..84c9eb49d3b 100644 --- 
a/devel/rubygem-aws-sdk-comprehend/distinfo +++ b/devel/rubygem-aws-sdk-comprehend/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058320 -SHA256 (rubygem/aws-sdk-comprehend-1.59.0.gem) = 35c93d232141f01f6ec601af900327b3c0a8da9bb3955886cf615c6c060a3164 -SIZE (rubygem/aws-sdk-comprehend-1.59.0.gem) = 79872 +TIMESTAMP = 1647264742 +SHA256 (rubygem/aws-sdk-comprehend-1.60.0.gem) = 6f185b76390271664c9b09a2a10a40c33be6398d327815d90bbf8d3f082ada19 +SIZE (rubygem/aws-sdk-comprehend-1.60.0.gem) = 81920 diff --git a/devel/rubygem-aws-sdk-connect/Makefile b/devel/rubygem-aws-sdk-connect/Makefile index 2d00ad0734c..047d6aab157 100644 --- a/devel/rubygem-aws-sdk-connect/Makefile +++ b/devel/rubygem-aws-sdk-connect/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-connect -PORTVERSION= 1.65.0 +PORTVERSION= 1.67.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-connect/distinfo b/devel/rubygem-aws-sdk-connect/distinfo index b3b75c016d2..91a59baf7d8 100644 --- a/devel/rubygem-aws-sdk-connect/distinfo +++ b/devel/rubygem-aws-sdk-connect/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058328 -SHA256 (rubygem/aws-sdk-connect-1.65.0.gem) = c059902dbfc037a76f1633a53f166bf2c0b9fd924b428827e7a05f1799b07307 -SIZE (rubygem/aws-sdk-connect-1.65.0.gem) = 103424 +TIMESTAMP = 1647264744 +SHA256 (rubygem/aws-sdk-connect-1.67.0.gem) = 8b412841840ee69f57e2865c00c6224b7d634fd11089b3ccf1c5f9fbc1616cb3 +SIZE (rubygem/aws-sdk-connect-1.67.0.gem) = 103936 diff --git a/devel/rubygem-aws-sdk-core/Makefile b/devel/rubygem-aws-sdk-core/Makefile index 875c13e9f49..efe0639f443 100644 --- a/devel/rubygem-aws-sdk-core/Makefile +++ b/devel/rubygem-aws-sdk-core/Makefile @@ -1,5 +1,5 @@ PORTNAME= aws-sdk-core -PORTVERSION= 3.127.0 +PORTVERSION= 3.130.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-core/distinfo b/devel/rubygem-aws-sdk-core/distinfo index 960fc0c52e5..2ec00bac01f 100644 --- a/devel/rubygem-aws-sdk-core/distinfo +++ 
b/devel/rubygem-aws-sdk-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058336 -SHA256 (rubygem/aws-sdk-core-3.127.0.gem) = 384e376dd8753b313f08deb7346f549d0e580be21e86dec42f43a0dd9af8b932 -SIZE (rubygem/aws-sdk-core-3.127.0.gem) = 311296 +TIMESTAMP = 1647264746 +SHA256 (rubygem/aws-sdk-core-3.130.0.gem) = 7547757afb0e340e7011381635db468d9bbcd7c6ea97bfefdccf9069c0ddd593 +SIZE (rubygem/aws-sdk-core-3.130.0.gem) = 312832 diff --git a/devel/rubygem-aws-sdk-devopsguru/Makefile b/devel/rubygem-aws-sdk-devopsguru/Makefile index f6927398c8a..7515d25bca9 100644 --- a/devel/rubygem-aws-sdk-devopsguru/Makefile +++ b/devel/rubygem-aws-sdk-devopsguru/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-devopsguru -PORTVERSION= 1.21.0 +PORTVERSION= 1.22.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-devopsguru/distinfo b/devel/rubygem-aws-sdk-devopsguru/distinfo index 678b32ebdec..98af08e8d34 100644 --- a/devel/rubygem-aws-sdk-devopsguru/distinfo +++ b/devel/rubygem-aws-sdk-devopsguru/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058358 -SHA256 (rubygem/aws-sdk-devopsguru-1.21.0.gem) = 244f7c19a1499f630fbb4fbaf988a7d16fd5ca587531ffb47891949732f15188 -SIZE (rubygem/aws-sdk-devopsguru-1.21.0.gem) = 55296 +TIMESTAMP = 1647264748 +SHA256 (rubygem/aws-sdk-devopsguru-1.22.0.gem) = bc8a1cf224b1752a38d43482d0a4a3323d382534c8e4aca3e6ac0ac322c4893a +SIZE (rubygem/aws-sdk-devopsguru-1.22.0.gem) = 56832 diff --git a/devel/rubygem-aws-sdk-ec2/Makefile b/devel/rubygem-aws-sdk-ec2/Makefile index 069b50c6fed..0219504e3aa 100644 --- a/devel/rubygem-aws-sdk-ec2/Makefile +++ b/devel/rubygem-aws-sdk-ec2/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-ec2 -PORTVERSION= 1.296.0 +PORTVERSION= 1.302.0 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -11,7 +11,7 @@ COMMENT= Official AWS Ruby gem for Amazon Elastic Compute Cloud (Amazon EC2) LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE.txt -RUN_DEPENDS= 
rubygem-aws-sdk-core>=3.125.0<4:devel/rubygem-aws-sdk-core \ +RUN_DEPENDS= rubygem-aws-sdk-core>=3.127.0<4:devel/rubygem-aws-sdk-core \ rubygem-aws-sigv4>=1.1<2:devel/rubygem-aws-sigv4 USES= gem diff --git a/devel/rubygem-aws-sdk-ec2/distinfo b/devel/rubygem-aws-sdk-ec2/distinfo index 1bf5936aaab..8f3c7858ac0 100644 --- a/devel/rubygem-aws-sdk-ec2/distinfo +++ b/devel/rubygem-aws-sdk-ec2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058376 -SHA256 (rubygem/aws-sdk-ec2-1.296.0.gem) = 51fdf3d82d45ee7683d463346ebe83133949d382e4ce63e66923aaa785bf8538 -SIZE (rubygem/aws-sdk-ec2-1.296.0.gem) = 920064 +TIMESTAMP = 1647264750 +SHA256 (rubygem/aws-sdk-ec2-1.302.0.gem) = 6835b3b575105997dcd2532518b196c94d9939be698a96ef3888fbe927cfa98f +SIZE (rubygem/aws-sdk-ec2-1.302.0.gem) = 924672 diff --git a/devel/rubygem-aws-sdk-ecr/Makefile b/devel/rubygem-aws-sdk-ecr/Makefile index 4d83c78ce84..5a586d0314c 100644 --- a/devel/rubygem-aws-sdk-ecr/Makefile +++ b/devel/rubygem-aws-sdk-ecr/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-ecr -PORTVERSION= 1.54.0 +PORTVERSION= 1.55.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-ecr/distinfo b/devel/rubygem-aws-sdk-ecr/distinfo index c3912403d78..8e8b3f55bee 100644 --- a/devel/rubygem-aws-sdk-ecr/distinfo +++ b/devel/rubygem-aws-sdk-ecr/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058380 -SHA256 (rubygem/aws-sdk-ecr-1.54.0.gem) = 9fb7bd7d3c1256cf83ac49516651e3afc6e0eeb2cbb9104a627d3ea55e15c572 -SIZE (rubygem/aws-sdk-ecr-1.54.0.gem) = 61440 +TIMESTAMP = 1647264752 +SHA256 (rubygem/aws-sdk-ecr-1.55.0.gem) = 196a7d6dd6a44c70b53e19c80229d316c86801241d8e3be50078644a8f257296 +SIZE (rubygem/aws-sdk-ecr-1.55.0.gem) = 61952 diff --git a/devel/rubygem-aws-sdk-ecs/Makefile b/devel/rubygem-aws-sdk-ecs/Makefile index 0bc0fbe6715..0d8704e3b50 100644 --- a/devel/rubygem-aws-sdk-ecs/Makefile +++ b/devel/rubygem-aws-sdk-ecs/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-ecs 
-PORTVERSION= 1.96.0 +PORTVERSION= 1.97.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-ecs/distinfo b/devel/rubygem-aws-sdk-ecs/distinfo index 54a8ec1fb85..35dc42952ba 100644 --- a/devel/rubygem-aws-sdk-ecs/distinfo +++ b/devel/rubygem-aws-sdk-ecs/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058384 -SHA256 (rubygem/aws-sdk-ecs-1.96.0.gem) = f48294bb6b6e120d103464ebe8cba08115b612821929b1f5453213983ce079e5 -SIZE (rubygem/aws-sdk-ecs-1.96.0.gem) = 176128 +TIMESTAMP = 1647264754 +SHA256 (rubygem/aws-sdk-ecs-1.97.0.gem) = a45e51eb168416702150fd1e5d93c8ba8bf66861fa38801422edf7f70321e55a +SIZE (rubygem/aws-sdk-ecs-1.97.0.gem) = 178688 diff --git a/devel/rubygem-aws-sdk-eks/Makefile b/devel/rubygem-aws-sdk-eks/Makefile index 086221b148e..97900fe6a1f 100644 --- a/devel/rubygem-aws-sdk-eks/Makefile +++ b/devel/rubygem-aws-sdk-eks/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-eks -PORTVERSION= 1.73.0 +PORTVERSION= 1.74.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-eks/distinfo b/devel/rubygem-aws-sdk-eks/distinfo index 56e6b0c41c6..168b30ee6a7 100644 --- a/devel/rubygem-aws-sdk-eks/distinfo +++ b/devel/rubygem-aws-sdk-eks/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058388 -SHA256 (rubygem/aws-sdk-eks-1.73.0.gem) = bf5873b4413956660578580a7889c26faca17b8942cd30b975fbae3f97490cfc -SIZE (rubygem/aws-sdk-eks-1.73.0.gem) = 69632 +TIMESTAMP = 1647264756 +SHA256 (rubygem/aws-sdk-eks-1.74.0.gem) = 9cb35ca34d74bbb6083848ae115964c7d15532e7de38f2d8b1f9790b31188da3 +SIZE (rubygem/aws-sdk-eks-1.74.0.gem) = 69632 diff --git a/devel/rubygem-aws-sdk-elasticache/Makefile b/devel/rubygem-aws-sdk-elasticache/Makefile index 14d19ce3706..c1e03825576 100644 --- a/devel/rubygem-aws-sdk-elasticache/Makefile +++ b/devel/rubygem-aws-sdk-elasticache/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-elasticache -PORTVERSION= 1.73.0 +PORTVERSION= 1.74.0 CATEGORIES= devel rubygems MASTER_SITES= 
RG diff --git a/devel/rubygem-aws-sdk-elasticache/distinfo b/devel/rubygem-aws-sdk-elasticache/distinfo index c5aadfd28d9..4dda372f5ef 100644 --- a/devel/rubygem-aws-sdk-elasticache/distinfo +++ b/devel/rubygem-aws-sdk-elasticache/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058390 -SHA256 (rubygem/aws-sdk-elasticache-1.73.0.gem) = ce3214ffc854038c9d7d3e6a50ee11269d02d1c424e16b002a1f4285d93a54aa -SIZE (rubygem/aws-sdk-elasticache-1.73.0.gem) = 140800 +TIMESTAMP = 1647264758 +SHA256 (rubygem/aws-sdk-elasticache-1.74.0.gem) = 4faae430c26ffd3070db339af027b629bd2df2758c3ef9e013bea7e307c6f2a8 +SIZE (rubygem/aws-sdk-elasticache-1.74.0.gem) = 140800 diff --git a/devel/rubygem-aws-sdk-finspacedata/Makefile b/devel/rubygem-aws-sdk-finspacedata/Makefile index 45a8dc9d502..5e254a4f815 100644 --- a/devel/rubygem-aws-sdk-finspacedata/Makefile +++ b/devel/rubygem-aws-sdk-finspacedata/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-finspacedata -PORTVERSION= 1.13.0 +PORTVERSION= 1.14.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-finspacedata/distinfo b/devel/rubygem-aws-sdk-finspacedata/distinfo index 6e93304e5dc..a9a403ac53c 100644 --- a/devel/rubygem-aws-sdk-finspacedata/distinfo +++ b/devel/rubygem-aws-sdk-finspacedata/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058412 -SHA256 (rubygem/aws-sdk-finspacedata-1.13.0.gem) = 5040485244f7af225bc3d6d3a80d01a4531d6f7482b0f1d5bd5dd36ae6f317e4 -SIZE (rubygem/aws-sdk-finspacedata-1.13.0.gem) = 32768 +TIMESTAMP = 1647264760 +SHA256 (rubygem/aws-sdk-finspacedata-1.14.0.gem) = 609ece400be42cf661c4f02eeb0503c46dcccf976fa96f26d141bcfe54d6ba6c +SIZE (rubygem/aws-sdk-finspacedata-1.14.0.gem) = 40960 diff --git a/devel/rubygem-aws-sdk-fis/Makefile b/devel/rubygem-aws-sdk-fis/Makefile index a1c7cdc7f75..93981cdc868 100644 --- a/devel/rubygem-aws-sdk-fis/Makefile +++ b/devel/rubygem-aws-sdk-fis/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-fis -PORTVERSION= 1.12.0 
+PORTVERSION= 1.13.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-fis/distinfo b/devel/rubygem-aws-sdk-fis/distinfo index aa320883218..ca34e6e522f 100644 --- a/devel/rubygem-aws-sdk-fis/distinfo +++ b/devel/rubygem-aws-sdk-fis/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058416 -SHA256 (rubygem/aws-sdk-fis-1.12.0.gem) = 9e60bb9d7bdd795e3f6baea352d05667e74e2df4826a54699fc4950b6660f4b9 -SIZE (rubygem/aws-sdk-fis-1.12.0.gem) = 28672 +TIMESTAMP = 1647264762 +SHA256 (rubygem/aws-sdk-fis-1.13.0.gem) = 8cd2fdf9fca6ceacc8b8bc1208eee4ce7c77e391e4946af6379494bb49d7aa68 +SIZE (rubygem/aws-sdk-fis-1.13.0.gem) = 30208 diff --git a/devel/rubygem-aws-sdk-fsx/Makefile b/devel/rubygem-aws-sdk-fsx/Makefile index 31f98b786e2..02c193a6068 100644 --- a/devel/rubygem-aws-sdk-fsx/Makefile +++ b/devel/rubygem-aws-sdk-fsx/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-fsx -PORTVERSION= 1.50.0 +PORTVERSION= 1.52.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-fsx/distinfo b/devel/rubygem-aws-sdk-fsx/distinfo index fc129c2798b..858c06c4e8e 100644 --- a/devel/rubygem-aws-sdk-fsx/distinfo +++ b/devel/rubygem-aws-sdk-fsx/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058426 -SHA256 (rubygem/aws-sdk-fsx-1.50.0.gem) = b1ba600225ad08f9ced989a7e1b3a0a01047ed5203056b9e1a89155a0da63ceb -SIZE (rubygem/aws-sdk-fsx-1.50.0.gem) = 131072 +TIMESTAMP = 1647264764 +SHA256 (rubygem/aws-sdk-fsx-1.52.0.gem) = 5cd19128be04d1e114abcb20e07a16616bb5c1d17f70ac57cbe571da31b5d02c +SIZE (rubygem/aws-sdk-fsx-1.52.0.gem) = 133632 diff --git a/devel/rubygem-aws-sdk-gamelift/Makefile b/devel/rubygem-aws-sdk-gamelift/Makefile index 1bcac3d117f..d1274120c2f 100644 --- a/devel/rubygem-aws-sdk-gamelift/Makefile +++ b/devel/rubygem-aws-sdk-gamelift/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-gamelift -PORTVERSION= 1.55.0 +PORTVERSION= 1.56.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git 
a/devel/rubygem-aws-sdk-gamelift/distinfo b/devel/rubygem-aws-sdk-gamelift/distinfo index abb03e15ccb..7cbc4efec5a 100644 --- a/devel/rubygem-aws-sdk-gamelift/distinfo +++ b/devel/rubygem-aws-sdk-gamelift/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058428 -SHA256 (rubygem/aws-sdk-gamelift-1.55.0.gem) = d5872d72456561844df134492f44da38e5858899676af189a963be0cecb80920 -SIZE (rubygem/aws-sdk-gamelift-1.55.0.gem) = 172544 +TIMESTAMP = 1647264766 +SHA256 (rubygem/aws-sdk-gamelift-1.56.0.gem) = 45de8a3eee97ff211c26f1eb9db2aea5c394e17f29df8a3aeca09b4af7e39fd2 +SIZE (rubygem/aws-sdk-gamelift-1.56.0.gem) = 172544 diff --git a/devel/rubygem-aws-sdk-greengrassv2/Makefile b/devel/rubygem-aws-sdk-greengrassv2/Makefile index 093f0a1b727..2042e5b8f8b 100644 --- a/devel/rubygem-aws-sdk-greengrassv2/Makefile +++ b/devel/rubygem-aws-sdk-greengrassv2/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-greengrassv2 -PORTVERSION= 1.16.0 +PORTVERSION= 1.17.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-greengrassv2/distinfo b/devel/rubygem-aws-sdk-greengrassv2/distinfo index af6a5d866a3..e997cffac31 100644 --- a/devel/rubygem-aws-sdk-greengrassv2/distinfo +++ b/devel/rubygem-aws-sdk-greengrassv2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058440 -SHA256 (rubygem/aws-sdk-greengrassv2-1.16.0.gem) = 14bdb7b12a6835822ab960378817a5f351e1934b4758aba2199a09f56e1a114d -SIZE (rubygem/aws-sdk-greengrassv2-1.16.0.gem) = 48640 +TIMESTAMP = 1647264768 +SHA256 (rubygem/aws-sdk-greengrassv2-1.17.0.gem) = baaf1fc943508d4f8d5ba068b5584d264e13186449d05c8307022aba6048c64f +SIZE (rubygem/aws-sdk-greengrassv2-1.17.0.gem) = 48640 diff --git a/devel/rubygem-aws-sdk-kafkaconnect/Makefile b/devel/rubygem-aws-sdk-kafkaconnect/Makefile index d4576ba9ff5..fda56977ff2 100644 --- a/devel/rubygem-aws-sdk-kafkaconnect/Makefile +++ b/devel/rubygem-aws-sdk-kafkaconnect/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-kafkaconnect -PORTVERSION= 
1.6.0 +PORTVERSION= 1.7.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-kafkaconnect/distinfo b/devel/rubygem-aws-sdk-kafkaconnect/distinfo index 9063caaa0f3..88ee6a5df2b 100644 --- a/devel/rubygem-aws-sdk-kafkaconnect/distinfo +++ b/devel/rubygem-aws-sdk-kafkaconnect/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058498 -SHA256 (rubygem/aws-sdk-kafkaconnect-1.6.0.gem) = f2ecf0f51f059d12943f88972007b48ce8363ef66ed83f6cc2d8b706468c093f -SIZE (rubygem/aws-sdk-kafkaconnect-1.6.0.gem) = 30208 +TIMESTAMP = 1647264770 +SHA256 (rubygem/aws-sdk-kafkaconnect-1.7.0.gem) = 766c893a3855dd0ee462c14a110267a1c2193ed988ff616dba9e89c0606ed1b8 +SIZE (rubygem/aws-sdk-kafkaconnect-1.7.0.gem) = 30720 diff --git a/devel/rubygem-aws-sdk-kendra/Makefile b/devel/rubygem-aws-sdk-kendra/Makefile index 862a756b9a7..135a74d31d1 100644 --- a/devel/rubygem-aws-sdk-kendra/Makefile +++ b/devel/rubygem-aws-sdk-kendra/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-kendra -PORTVERSION= 1.45.0 +PORTVERSION= 1.46.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-kendra/distinfo b/devel/rubygem-aws-sdk-kendra/distinfo index 05b11ae1259..bcbdbe2f163 100644 --- a/devel/rubygem-aws-sdk-kendra/distinfo +++ b/devel/rubygem-aws-sdk-kendra/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058500 -SHA256 (rubygem/aws-sdk-kendra-1.45.0.gem) = 2a2e305afeadef4e8c92aad1b41d8ed0ef59ddc459c1f680d93e70ac084b028e -SIZE (rubygem/aws-sdk-kendra-1.45.0.gem) = 126464 +TIMESTAMP = 1647264772 +SHA256 (rubygem/aws-sdk-kendra-1.46.0.gem) = 43a65960e0d194e4127ff80bf0c86713a917cdf374fa6c1c480de8ae52f851f6 +SIZE (rubygem/aws-sdk-kendra-1.46.0.gem) = 127488 diff --git a/devel/rubygem-aws-sdk-keyspaces/Makefile b/devel/rubygem-aws-sdk-keyspaces/Makefile new file mode 100644 index 00000000000..13d2dc62dd0 --- /dev/null +++ b/devel/rubygem-aws-sdk-keyspaces/Makefile @@ -0,0 +1,22 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= aws-sdk-keyspaces +PORTVERSION= 
1.0.0 +CATEGORIES= devel rubygems +MASTER_SITES= RG + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Official AWS Ruby gem for Amazon Keyspaces + +LICENSE= APACHE20 +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +RUN_DEPENDS= rubygem-aws-sdk-core>=3.127.0<4:devel/rubygem-aws-sdk-core \ + rubygem-aws-sigv4>=1.1<2:devel/rubygem-aws-sigv4 + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-aws-sdk-keyspaces/distinfo b/devel/rubygem-aws-sdk-keyspaces/distinfo new file mode 100644 index 00000000000..4733b54966a --- /dev/null +++ b/devel/rubygem-aws-sdk-keyspaces/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264414 +SHA256 (rubygem/aws-sdk-keyspaces-1.0.0.gem) = 4e0f347d3cddebfd1083537054ad246b304e65bd8dfe056001a4e1bf7240f41b +SIZE (rubygem/aws-sdk-keyspaces-1.0.0.gem) = 29696 diff --git a/devel/rubygem-aws-sdk-keyspaces/pkg-descr b/devel/rubygem-aws-sdk-keyspaces/pkg-descr new file mode 100644 index 00000000000..385ce1af43c --- /dev/null +++ b/devel/rubygem-aws-sdk-keyspaces/pkg-descr @@ -0,0 +1,4 @@ +Official AWS Ruby gem for Amazon Keyspaces. 
+ +WWW: https://github.com/aws/aws-sdk-ruby +WWW: https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-keyspaces diff --git a/devel/rubygem-aws-sdk-lambda/Makefile b/devel/rubygem-aws-sdk-lambda/Makefile index 7ac4935a8ab..459c97cfbe3 100644 --- a/devel/rubygem-aws-sdk-lambda/Makefile +++ b/devel/rubygem-aws-sdk-lambda/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-lambda -PORTVERSION= 1.80.0 +PORTVERSION= 1.81.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-lambda/distinfo b/devel/rubygem-aws-sdk-lambda/distinfo index b25f9d74314..414aff7c68e 100644 --- a/devel/rubygem-aws-sdk-lambda/distinfo +++ b/devel/rubygem-aws-sdk-lambda/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058520 -SHA256 (rubygem/aws-sdk-lambda-1.80.0.gem) = 892416b15c283afb638f0ba2ab84f59fd4446a07cf09bbe8fd32227b20646112 -SIZE (rubygem/aws-sdk-lambda-1.80.0.gem) = 90112 +TIMESTAMP = 1647264774 +SHA256 (rubygem/aws-sdk-lambda-1.81.0.gem) = f8536e8ebf2b1aa38eed8879676b9383a09a9ddf4dc2a7a7aeeda600b3258821 +SIZE (rubygem/aws-sdk-lambda-1.81.0.gem) = 90112 diff --git a/devel/rubygem-aws-sdk-lexmodelsv2/Makefile b/devel/rubygem-aws-sdk-lexmodelsv2/Makefile index 64064af93c3..026ee60cbf3 100644 --- a/devel/rubygem-aws-sdk-lexmodelsv2/Makefile +++ b/devel/rubygem-aws-sdk-lexmodelsv2/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-lexmodelsv2 -PORTVERSION= 1.22.0 +PORTVERSION= 1.23.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-lexmodelsv2/distinfo b/devel/rubygem-aws-sdk-lexmodelsv2/distinfo index 33bdf7f68db..8ab8e2f05c8 100644 --- a/devel/rubygem-aws-sdk-lexmodelsv2/distinfo +++ b/devel/rubygem-aws-sdk-lexmodelsv2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058528 -SHA256 (rubygem/aws-sdk-lexmodelsv2-1.22.0.gem) = 2f2951c668f5a0e2c87e62b405fdf61514a1d95677966da9c4608e9c3df54cd0 -SIZE (rubygem/aws-sdk-lexmodelsv2-1.22.0.gem) = 125440 +TIMESTAMP = 1647264776 +SHA256 
(rubygem/aws-sdk-lexmodelsv2-1.23.0.gem) = 93df5164affb0e0f8a276b013daa5237f0cb30f625c4b952b68b7b4f8ad9cbc3 +SIZE (rubygem/aws-sdk-lexmodelsv2-1.23.0.gem) = 125952 diff --git a/devel/rubygem-aws-sdk-macie/Makefile b/devel/rubygem-aws-sdk-macie/Makefile index 832b3d47a8f..f6513713698 100644 --- a/devel/rubygem-aws-sdk-macie/Makefile +++ b/devel/rubygem-aws-sdk-macie/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-macie -PORTVERSION= 1.37.0 +PORTVERSION= 1.38.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-macie/distinfo b/devel/rubygem-aws-sdk-macie/distinfo index 252b41a3d0e..f2df0333246 100644 --- a/devel/rubygem-aws-sdk-macie/distinfo +++ b/devel/rubygem-aws-sdk-macie/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058546 -SHA256 (rubygem/aws-sdk-macie-1.37.0.gem) = 16643c1751a57ccf8cfd01ede6b9b5a1150fab84f50b3dfd39976d36f60818f8 -SIZE (rubygem/aws-sdk-macie-1.37.0.gem) = 20992 +TIMESTAMP = 1647264778 +SHA256 (rubygem/aws-sdk-macie-1.38.0.gem) = 7ab350c77d8001a74f6c18cb3b1758896325f67b2f692d9bcb84ef0de0b9d3a9 +SIZE (rubygem/aws-sdk-macie-1.38.0.gem) = 20992 diff --git a/devel/rubygem-aws-sdk-mediaconvert/Makefile b/devel/rubygem-aws-sdk-mediaconvert/Makefile index e412df2d153..d88d796c246 100644 --- a/devel/rubygem-aws-sdk-mediaconvert/Makefile +++ b/devel/rubygem-aws-sdk-mediaconvert/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-mediaconvert -PORTVERSION= 1.86.0 +PORTVERSION= 1.87.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-mediaconvert/distinfo b/devel/rubygem-aws-sdk-mediaconvert/distinfo index 82de79c4324..f1a4ae096b2 100644 --- a/devel/rubygem-aws-sdk-mediaconvert/distinfo +++ b/devel/rubygem-aws-sdk-mediaconvert/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058562 -SHA256 (rubygem/aws-sdk-mediaconvert-1.86.0.gem) = ecee742442b0d290459a1f5efd6eb7d41bd3393a002c20236e98b2dfab208c7e -SIZE (rubygem/aws-sdk-mediaconvert-1.86.0.gem) = 457216 +TIMESTAMP = 
1647264780 +SHA256 (rubygem/aws-sdk-mediaconvert-1.87.0.gem) = 99475a674b592f6eb3e8c5b26d34526bd2498d84601ade979fa40bab8c81fe15 +SIZE (rubygem/aws-sdk-mediaconvert-1.87.0.gem) = 458752 diff --git a/devel/rubygem-aws-sdk-mediapackage/Makefile b/devel/rubygem-aws-sdk-mediapackage/Makefile index 29c17ce4478..e1bfea81b0f 100644 --- a/devel/rubygem-aws-sdk-mediapackage/Makefile +++ b/devel/rubygem-aws-sdk-mediapackage/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-mediapackage -PORTVERSION= 1.51.0 +PORTVERSION= 1.52.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-mediapackage/distinfo b/devel/rubygem-aws-sdk-mediapackage/distinfo index 26fe48c8bd9..bf1f4675439 100644 --- a/devel/rubygem-aws-sdk-mediapackage/distinfo +++ b/devel/rubygem-aws-sdk-mediapackage/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058566 -SHA256 (rubygem/aws-sdk-mediapackage-1.51.0.gem) = 50c3c6d4bb18a7f60a0f992df2bc7b76b55b12dc0e7d5e21f4eacf67ec09dfc4 -SIZE (rubygem/aws-sdk-mediapackage-1.51.0.gem) = 39424 +TIMESTAMP = 1647264782 +SHA256 (rubygem/aws-sdk-mediapackage-1.52.0.gem) = 8ec3fe311ed4d4356e481f116c184b6ae6cae8f363cd54b7e86c9a38a187cfb3 +SIZE (rubygem/aws-sdk-mediapackage-1.52.0.gem) = 39424 diff --git a/devel/rubygem-aws-sdk-mgn/Makefile b/devel/rubygem-aws-sdk-mgn/Makefile index 5ddb0ca24c9..d223a9a3d3e 100644 --- a/devel/rubygem-aws-sdk-mgn/Makefile +++ b/devel/rubygem-aws-sdk-mgn/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-mgn -PORTVERSION= 1.11.0 +PORTVERSION= 1.12.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-mgn/distinfo b/devel/rubygem-aws-sdk-mgn/distinfo index 8019806ddfc..ac4446f0c9f 100644 --- a/devel/rubygem-aws-sdk-mgn/distinfo +++ b/devel/rubygem-aws-sdk-mgn/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058578 -SHA256 (rubygem/aws-sdk-mgn-1.11.0.gem) = d65699fc903722c81566705705c5212f75af20c2f4e5f97b0c4f96c48dbe6dbb -SIZE (rubygem/aws-sdk-mgn-1.11.0.gem) = 39936 
+TIMESTAMP = 1647264784 +SHA256 (rubygem/aws-sdk-mgn-1.12.0.gem) = 5961bbdf46b071059ee4025de4ff2106d509ec4d2ea1d18c164597b8ad274097 +SIZE (rubygem/aws-sdk-mgn-1.12.0.gem) = 39936 diff --git a/devel/rubygem-aws-sdk-migrationhubrefactorspaces/Makefile b/devel/rubygem-aws-sdk-migrationhubrefactorspaces/Makefile index 2b471262e19..66513900470 100644 --- a/devel/rubygem-aws-sdk-migrationhubrefactorspaces/Makefile +++ b/devel/rubygem-aws-sdk-migrationhubrefactorspaces/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-migrationhubrefactorspaces -PORTVERSION= 1.4.0 +PORTVERSION= 1.5.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-migrationhubrefactorspaces/distinfo b/devel/rubygem-aws-sdk-migrationhubrefactorspaces/distinfo index b7fdf46b080..7661f30681f 100644 --- a/devel/rubygem-aws-sdk-migrationhubrefactorspaces/distinfo +++ b/devel/rubygem-aws-sdk-migrationhubrefactorspaces/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058584 -SHA256 (rubygem/aws-sdk-migrationhubrefactorspaces-1.4.0.gem) = f2820827a1b4c75c5f278bb7cd56409e4eb51f51cecf34b0b8b6dfd4f9b27461 -SIZE (rubygem/aws-sdk-migrationhubrefactorspaces-1.4.0.gem) = 35328 +TIMESTAMP = 1647264786 +SHA256 (rubygem/aws-sdk-migrationhubrefactorspaces-1.5.0.gem) = 543d3efe3f2b592f77559b1a2f8de5d4bc0f5b80a6af9764a7d469a777abbff1 +SIZE (rubygem/aws-sdk-migrationhubrefactorspaces-1.5.0.gem) = 35328 diff --git a/devel/rubygem-aws-sdk-outposts/Makefile b/devel/rubygem-aws-sdk-outposts/Makefile index 80cd6cb8002..4fb1e0dfc72 100644 --- a/devel/rubygem-aws-sdk-outposts/Makefile +++ b/devel/rubygem-aws-sdk-outposts/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-outposts -PORTVERSION= 1.29.0 +PORTVERSION= 1.30.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-outposts/distinfo b/devel/rubygem-aws-sdk-outposts/distinfo index e7bb42783e9..c35e3cf24e6 100644 --- a/devel/rubygem-aws-sdk-outposts/distinfo +++ 
b/devel/rubygem-aws-sdk-outposts/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058612 -SHA256 (rubygem/aws-sdk-outposts-1.29.0.gem) = 539e5217ad575e3bd29b056ce2758361f9a757caed29e26e06e87889adea582d -SIZE (rubygem/aws-sdk-outposts-1.29.0.gem) = 33792 +TIMESTAMP = 1647264788 +SHA256 (rubygem/aws-sdk-outposts-1.30.0.gem) = c8134c134a4c510e730849e2def4db46342bd15fdc020f084a8dd5de33b0ccc4 +SIZE (rubygem/aws-sdk-outposts-1.30.0.gem) = 34304 diff --git a/devel/rubygem-aws-sdk-panorama/Makefile b/devel/rubygem-aws-sdk-panorama/Makefile index 9d5af4f3836..b2c779b6efe 100644 --- a/devel/rubygem-aws-sdk-panorama/Makefile +++ b/devel/rubygem-aws-sdk-panorama/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-panorama -PORTVERSION= 1.5.0 +PORTVERSION= 1.6.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-panorama/distinfo b/devel/rubygem-aws-sdk-panorama/distinfo index 08e7eb40c11..1b178753448 100644 --- a/devel/rubygem-aws-sdk-panorama/distinfo +++ b/devel/rubygem-aws-sdk-panorama/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058614 -SHA256 (rubygem/aws-sdk-panorama-1.5.0.gem) = 32bdb906e16fb6e8d01b5f4d09093ce0262cf8b363603c08de6a7781ef5455e7 -SIZE (rubygem/aws-sdk-panorama-1.5.0.gem) = 38912 +TIMESTAMP = 1647264790 +SHA256 (rubygem/aws-sdk-panorama-1.6.0.gem) = 29d3d9ecef884a87d9a53680c8a11d0b373e71979d2fce3e29bb293214697154 +SIZE (rubygem/aws-sdk-panorama-1.6.0.gem) = 39936 diff --git a/devel/rubygem-aws-sdk-rds/Makefile b/devel/rubygem-aws-sdk-rds/Makefile index fd2aeccb282..da5a088f2f4 100644 --- a/devel/rubygem-aws-sdk-rds/Makefile +++ b/devel/rubygem-aws-sdk-rds/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-rds -PORTVERSION= 1.140.0 +PORTVERSION= 1.141.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-rds/distinfo b/devel/rubygem-aws-sdk-rds/distinfo index 74d5379b3cd..794e7592144 100644 --- a/devel/rubygem-aws-sdk-rds/distinfo +++ b/devel/rubygem-aws-sdk-rds/distinfo @@ 
-1,3 +1,3 @@ -TIMESTAMP = 1646058646 -SHA256 (rubygem/aws-sdk-rds-1.140.0.gem) = 279203da55d0645a1fb51ac4b18315f2ddc364683cd391f8ad9b99014f12438d -SIZE (rubygem/aws-sdk-rds-1.140.0.gem) = 418816 +TIMESTAMP = 1647264792 +SHA256 (rubygem/aws-sdk-rds-1.141.0.gem) = 6d24e5c3684a2637424e7a77b774224843b3451d0dcc12fd1bb93e9e99827db2 +SIZE (rubygem/aws-sdk-rds-1.141.0.gem) = 420864 diff --git a/devel/rubygem-aws-sdk-resources/Makefile b/devel/rubygem-aws-sdk-resources/Makefile index 56dc76c6bae..b2348869bd6 100644 --- a/devel/rubygem-aws-sdk-resources/Makefile +++ b/devel/rubygem-aws-sdk-resources/Makefile @@ -1,5 +1,5 @@ PORTNAME= aws-sdk-resources -PORTVERSION= 3.124.0 +PORTVERSION= 3.125.0 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -165,6 +165,7 @@ RUN_DEPENDS= rubygem-aws-sdk-accessanalyzer>=1<2:devel/rubygem-aws-sdk-accessana rubygem-aws-sdk-kafka>=1<2:devel/rubygem-aws-sdk-kafka \ rubygem-aws-sdk-kafkaconnect>=1<2:devel/rubygem-aws-sdk-kafkaconnect \ rubygem-aws-sdk-kendra>=1<2:devel/rubygem-aws-sdk-kendra \ + rubygem-aws-sdk-keyspaces>=1<2:devel/rubygem-aws-sdk-keyspaces \ rubygem-aws-sdk-kinesis>=1<2:devel/rubygem-aws-sdk-kinesis \ rubygem-aws-sdk-kinesisanalytics>=1<2:devel/rubygem-aws-sdk-kinesisanalytics \ rubygem-aws-sdk-kinesisanalyticsv2>=1<2:devel/rubygem-aws-sdk-kinesisanalyticsv2 \ diff --git a/devel/rubygem-aws-sdk-resources/distinfo b/devel/rubygem-aws-sdk-resources/distinfo index 56f849a4eca..d4272d0516f 100644 --- a/devel/rubygem-aws-sdk-resources/distinfo +++ b/devel/rubygem-aws-sdk-resources/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641046510 -SHA256 (rubygem/aws-sdk-resources-3.124.0.gem) = a8ca830c86e84146b4f547798ccee0bd2e77ba5ce51d4bd48c245280e8c326b2 -SIZE (rubygem/aws-sdk-resources-3.124.0.gem) = 18944 +TIMESTAMP = 1647264794 +SHA256 (rubygem/aws-sdk-resources-3.125.0.gem) = 3fbd6ccc5527940bf8362317610fefe26d51d1268e385eb49a41bb91607e1737 +SIZE (rubygem/aws-sdk-resources-3.125.0.gem) = 18944 diff --git 
a/devel/rubygem-aws-sdk-route53recoverycluster/Makefile b/devel/rubygem-aws-sdk-route53recoverycluster/Makefile index 0820bf47d9b..e66a3b4706b 100644 --- a/devel/rubygem-aws-sdk-route53recoverycluster/Makefile +++ b/devel/rubygem-aws-sdk-route53recoverycluster/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-route53recoverycluster -PORTVERSION= 1.9.0 +PORTVERSION= 1.10.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-route53recoverycluster/distinfo b/devel/rubygem-aws-sdk-route53recoverycluster/distinfo index ec7d4f10e5e..f85814ec370 100644 --- a/devel/rubygem-aws-sdk-route53recoverycluster/distinfo +++ b/devel/rubygem-aws-sdk-route53recoverycluster/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058666 -SHA256 (rubygem/aws-sdk-route53recoverycluster-1.9.0.gem) = 3cbed0b1d7b03ec4fb0f42f198618d74ba64edbb3edd5d4fd75c0dd87f82e408 -SIZE (rubygem/aws-sdk-route53recoverycluster-1.9.0.gem) = 17920 +TIMESTAMP = 1647264796 +SHA256 (rubygem/aws-sdk-route53recoverycluster-1.10.0.gem) = a2417e1fa31ad4adf47faa9999eeb80ce53651a3c15a4f487c06fe529e330114 +SIZE (rubygem/aws-sdk-route53recoverycluster-1.10.0.gem) = 18432 diff --git a/devel/rubygem-aws-sdk-secretsmanager/Makefile b/devel/rubygem-aws-sdk-secretsmanager/Makefile index a2f9eb4b35d..864bde250d7 100644 --- a/devel/rubygem-aws-sdk-secretsmanager/Makefile +++ b/devel/rubygem-aws-sdk-secretsmanager/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-secretsmanager -PORTVERSION= 1.58.0 +PORTVERSION= 1.59.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-secretsmanager/distinfo b/devel/rubygem-aws-sdk-secretsmanager/distinfo index 446d1582da6..259ff9de9b1 100644 --- a/devel/rubygem-aws-sdk-secretsmanager/distinfo +++ b/devel/rubygem-aws-sdk-secretsmanager/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058692 -SHA256 (rubygem/aws-sdk-secretsmanager-1.58.0.gem) = b2efa55b18985d0fe974023023eaba1fe40a8a22c1ae0d3bd985a4be0a606c32 -SIZE 
(rubygem/aws-sdk-secretsmanager-1.58.0.gem) = 51712 +TIMESTAMP = 1647264798 +SHA256 (rubygem/aws-sdk-secretsmanager-1.59.0.gem) = 84679d86217e3eb224a9c53b2d922be5500193262a5409f8df754bf787450fdd +SIZE (rubygem/aws-sdk-secretsmanager-1.59.0.gem) = 51712 diff --git a/devel/rubygem-aws-sdk-synthetics/Makefile b/devel/rubygem-aws-sdk-synthetics/Makefile index 677b78bf42a..f3dac651a02 100644 --- a/devel/rubygem-aws-sdk-synthetics/Makefile +++ b/devel/rubygem-aws-sdk-synthetics/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-synthetics -PORTVERSION= 1.25.0 +PORTVERSION= 1.26.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-synthetics/distinfo b/devel/rubygem-aws-sdk-synthetics/distinfo index dc5bff258b5..5b587dd53f2 100644 --- a/devel/rubygem-aws-sdk-synthetics/distinfo +++ b/devel/rubygem-aws-sdk-synthetics/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058742 -SHA256 (rubygem/aws-sdk-synthetics-1.25.0.gem) = 26c9914e8c8c6f85738b85511c13ab40e531b82b9165874a328c326a52780484 -SIZE (rubygem/aws-sdk-synthetics-1.25.0.gem) = 34304 +TIMESTAMP = 1647264800 +SHA256 (rubygem/aws-sdk-synthetics-1.26.0.gem) = 4dfa9084bd90066b7333a56cddb90acb21a7801ef1837607743a70667ce4436a +SIZE (rubygem/aws-sdk-synthetics-1.26.0.gem) = 34816 diff --git a/devel/rubygem-aws-sdk-timestreamquery/Makefile b/devel/rubygem-aws-sdk-timestreamquery/Makefile index 7e844028e4c..2bfa76adf24 100644 --- a/devel/rubygem-aws-sdk-timestreamquery/Makefile +++ b/devel/rubygem-aws-sdk-timestreamquery/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-timestreamquery -PORTVERSION= 1.14.0 +PORTVERSION= 1.15.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-timestreamquery/distinfo b/devel/rubygem-aws-sdk-timestreamquery/distinfo index d1663d1784c..ee923f8760b 100644 --- a/devel/rubygem-aws-sdk-timestreamquery/distinfo +++ b/devel/rubygem-aws-sdk-timestreamquery/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058746 -SHA256 
(rubygem/aws-sdk-timestreamquery-1.14.0.gem) = a4a06d71b6d5bb4adbbdd5466ba21c060482acb2dfbbefae164d75a1fd646538 -SIZE (rubygem/aws-sdk-timestreamquery-1.14.0.gem) = 36352 +TIMESTAMP = 1647264802 +SHA256 (rubygem/aws-sdk-timestreamquery-1.15.0.gem) = a8222428c8b85ecfa334614c0d62071564ed02bd16d2af93ca8d18e3fb777b7b +SIZE (rubygem/aws-sdk-timestreamquery-1.15.0.gem) = 36352 diff --git a/devel/rubygem-aws-sdk-transcribeservice/Makefile b/devel/rubygem-aws-sdk-transcribeservice/Makefile index 969d63a2189..a0737507824 100644 --- a/devel/rubygem-aws-sdk-transcribeservice/Makefile +++ b/devel/rubygem-aws-sdk-transcribeservice/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-transcribeservice -PORTVERSION= 1.72.0 +PORTVERSION= 1.73.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-transcribeservice/distinfo b/devel/rubygem-aws-sdk-transcribeservice/distinfo index 35238d72c43..5ca0bc7e6c7 100644 --- a/devel/rubygem-aws-sdk-transcribeservice/distinfo +++ b/devel/rubygem-aws-sdk-transcribeservice/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058750 -SHA256 (rubygem/aws-sdk-transcribeservice-1.72.0.gem) = f1bdf0c5525e1f8e6d1ae0f26782d38c5c8f6ec73f12679734f24ca6eb843502 -SIZE (rubygem/aws-sdk-transcribeservice-1.72.0.gem) = 67584 +TIMESTAMP = 1647264804 +SHA256 (rubygem/aws-sdk-transcribeservice-1.73.0.gem) = 107e8d687b579f45f079b6db5ed64b98389a2bfe4229c9afe0983e90e7df1356 +SIZE (rubygem/aws-sdk-transcribeservice-1.73.0.gem) = 68608 diff --git a/devel/rubygem-aws-sdk-transcribestreamingservice/Makefile b/devel/rubygem-aws-sdk-transcribestreamingservice/Makefile index c903fbb565d..b0c97462723 100644 --- a/devel/rubygem-aws-sdk-transcribestreamingservice/Makefile +++ b/devel/rubygem-aws-sdk-transcribestreamingservice/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-transcribestreamingservice -PORTVERSION= 1.41.0 +PORTVERSION= 1.42.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git 
a/devel/rubygem-aws-sdk-transcribestreamingservice/distinfo b/devel/rubygem-aws-sdk-transcribestreamingservice/distinfo index 479a1016731..b166b0d46bb 100644 --- a/devel/rubygem-aws-sdk-transcribestreamingservice/distinfo +++ b/devel/rubygem-aws-sdk-transcribestreamingservice/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058752 -SHA256 (rubygem/aws-sdk-transcribestreamingservice-1.41.0.gem) = 4f429b71529e8b8bc51596d2f21be9580091afa986449a6eb6dcb49ae6084460 -SIZE (rubygem/aws-sdk-transcribestreamingservice-1.41.0.gem) = 33280 +TIMESTAMP = 1647264806 +SHA256 (rubygem/aws-sdk-transcribestreamingservice-1.42.0.gem) = fbe5e8a6ee95c9a73239b06afd1a3e88d42e0ca3ff7b271c4ae2d670404b1400 +SIZE (rubygem/aws-sdk-transcribestreamingservice-1.42.0.gem) = 34304 diff --git a/devel/rubygem-aws-sdk-transfer/Makefile b/devel/rubygem-aws-sdk-transfer/Makefile index 2e74cf4c97f..15b82836223 100644 --- a/devel/rubygem-aws-sdk-transfer/Makefile +++ b/devel/rubygem-aws-sdk-transfer/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= aws-sdk-transfer -PORTVERSION= 1.49.0 +PORTVERSION= 1.51.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-aws-sdk-transfer/distinfo b/devel/rubygem-aws-sdk-transfer/distinfo index 8d7e8320754..5d127829f20 100644 --- a/devel/rubygem-aws-sdk-transfer/distinfo +++ b/devel/rubygem-aws-sdk-transfer/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058754 -SHA256 (rubygem/aws-sdk-transfer-1.49.0.gem) = 03e5367b6d6f90185140d26478fa3fbd106bbc6a2ba7d1373b2ddd204784a7f7 -SIZE (rubygem/aws-sdk-transfer-1.49.0.gem) = 68096 +TIMESTAMP = 1647264808 +SHA256 (rubygem/aws-sdk-transfer-1.51.0.gem) = ce5282199055713353076abf96e1356460a21186cddaa5d9f5aa992f099bdd6c +SIZE (rubygem/aws-sdk-transfer-1.51.0.gem) = 72192 diff --git a/devel/rubygem-bootsnap/Makefile b/devel/rubygem-bootsnap/Makefile index 4550fdf1b93..c87d6b83350 100644 --- a/devel/rubygem-bootsnap/Makefile +++ b/devel/rubygem-bootsnap/Makefile @@ -1,5 +1,5 @@ PORTNAME= bootsnap -PORTVERSION= 
1.10.3 +PORTVERSION= 1.11.1 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-bootsnap/distinfo b/devel/rubygem-bootsnap/distinfo index 69dd61e85d5..3f0eceedab8 100644 --- a/devel/rubygem-bootsnap/distinfo +++ b/devel/rubygem-bootsnap/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058782 -SHA256 (rubygem/bootsnap-1.10.3.gem) = a1624dcb01cbe7bcbf5be11a033f842ce7f980fb4b59ca3dc7acfdf061e97014 -SIZE (rubygem/bootsnap-1.10.3.gem) = 39424 +TIMESTAMP = 1647264810 +SHA256 (rubygem/bootsnap-1.11.1.gem) = 6fc933ae3e23e0c3ccae2bc4b7d66c1dec774739ca4011b3ce76d7672f82502e +SIZE (rubygem/bootsnap-1.11.1.gem) = 39936 diff --git a/devel/rubygem-cucumber-rails/Makefile b/devel/rubygem-cucumber-rails/Makefile index 49926c2809c..e68a1192e53 100644 --- a/devel/rubygem-cucumber-rails/Makefile +++ b/devel/rubygem-cucumber-rails/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= cucumber-rails -PORTVERSION= 2.4.0 +PORTVERSION= 2.5.0 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -15,7 +15,7 @@ RUN_DEPENDS= rubygem-capybara>=2.18<4:devel/rubygem-capybara \ rubygem-cucumber>=3.2<8:devel/rubygem-cucumber \ rubygem-mime-types>=3.3<4:misc/rubygem-mime-types \ rubygem-nokogiri>=1.10<2:textproc/rubygem-nokogiri \ - rubygem-railties5>=5.0<7:www/rubygem-railties5 \ + rubygem-railties5>=5.0<8:www/rubygem-railties5 \ rubygem-rexml>=3.0<4:textproc/rubygem-rexml \ rubygem-webrick>=1.7<2:www/rubygem-webrick diff --git a/devel/rubygem-cucumber-rails/distinfo b/devel/rubygem-cucumber-rails/distinfo index 66832dd78f2..0964b5644c5 100644 --- a/devel/rubygem-cucumber-rails/distinfo +++ b/devel/rubygem-cucumber-rails/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1627120914 -SHA256 (rubygem/cucumber-rails-2.4.0.gem) = 660452ebd1dff14cc3d8f518cedb22b4b6f22e03d8d6882f53a9d03b2d6720f9 -SIZE (rubygem/cucumber-rails-2.4.0.gem) = 27136 +TIMESTAMP = 1647264812 +SHA256 (rubygem/cucumber-rails-2.5.0.gem) = 59137b170edc667377b09e6dbde4b5af4342c9a739fa2d48e7368270015dea72 +SIZE 
(rubygem/cucumber-rails-2.5.0.gem) = 27648 diff --git a/devel/rubygem-error_highlight/Makefile b/devel/rubygem-error_highlight/Makefile new file mode 100644 index 00000000000..3dcb77a4254 --- /dev/null +++ b/devel/rubygem-error_highlight/Makefile @@ -0,0 +1,24 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= error_highlight +PORTVERSION= 0.3.0 +CATEGORIES= devel rubygems +MASTER_SITES= RG + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Enhance Exception\#message + +LICENSE= BSD2CLAUSE RUBY +LICENSE_COMB= dual +LICENSE_FILE_BSD2CLAUSE=${WRKSRC}/LICENSE.txt + +BROKEN_RUBY26= yes +BROKEN_RUBY27= yes +BROKEN_RUBY30= yes + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-error_highlight/distinfo b/devel/rubygem-error_highlight/distinfo new file mode 100644 index 00000000000..0795e54cb3a --- /dev/null +++ b/devel/rubygem-error_highlight/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264416 +SHA256 (rubygem/error_highlight-0.3.0.gem) = de94ce0faaf5ec7b78e2a466ed741fa180a4af7865a0c3e5d4599ff5eff96188 +SIZE (rubygem/error_highlight-0.3.0.gem) = 10752 diff --git a/devel/rubygem-error_highlight/pkg-descr b/devel/rubygem-error_highlight/pkg-descr new file mode 100644 index 00000000000..8b85597dc63 --- /dev/null +++ b/devel/rubygem-error_highlight/pkg-descr @@ -0,0 +1,7 @@ +Ruby 3.1 will ship with this gem and it will automatically be required when a +Ruby process starts up. No special setup is required. + +This gem works only on MRI and requires Ruby 3.1 or later because it depends on +MRI's internal APIs that are available since 3.1. 
+ +WWW: https://github.com/ruby/error_highlight diff --git a/devel/rubygem-et-orbi/Makefile b/devel/rubygem-et-orbi/Makefile index f4cac9704ce..2b9c4a25b27 100644 --- a/devel/rubygem-et-orbi/Makefile +++ b/devel/rubygem-et-orbi/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= et-orbi -PORTVERSION= 1.2.6 +PORTVERSION= 1.2.7 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-et-orbi/distinfo b/devel/rubygem-et-orbi/distinfo index 8394b16aa2a..232a1f99089 100644 --- a/devel/rubygem-et-orbi/distinfo +++ b/devel/rubygem-et-orbi/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1635731111 -SHA256 (rubygem/et-orbi-1.2.6.gem) = b5005bf1fa69104796f1173797b30187a3c90dd7bb2de098ab5ed80805504e77 -SIZE (rubygem/et-orbi-1.2.6.gem) = 18432 +TIMESTAMP = 1647264814 +SHA256 (rubygem/et-orbi-1.2.7.gem) = 3b693d47f94a4060ccc07e60adda488759b1e8b9228a633ebbad842dfc245fb4 +SIZE (rubygem/et-orbi-1.2.7.gem) = 18432 diff --git a/devel/rubygem-faker/Makefile b/devel/rubygem-faker/Makefile index 6d5ab9d6c02..2c21706c2ed 100644 --- a/devel/rubygem-faker/Makefile +++ b/devel/rubygem-faker/Makefile @@ -1,5 +1,5 @@ PORTNAME= faker -PORTVERSION= 2.19.0 +PORTVERSION= 2.20.0 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -9,15 +9,15 @@ COMMENT= Library for generating fake data: names, addresses, phone numbers LICENSE= MIT LICENSE_FILE= ${WRKSRC}/License.txt -RUN_DEPENDS= rubygem-i18n>=1.6,2<2,2:devel/rubygem-i18n +RUN_DEPENDS= rubygem-i18n>=1.8.11,2<2,2:devel/rubygem-i18n USES= gem USE_RUBY= yes -CONFLICTS_INSTALL= py*-Faker - NO_ARCH= yes PLIST_FILES= bin/faker +CONFLICTS_INSTALL= py*-Faker + .include diff --git a/devel/rubygem-faker/distinfo b/devel/rubygem-faker/distinfo index 5157643e82e..36878e87bb8 100644 --- a/devel/rubygem-faker/distinfo +++ b/devel/rubygem-faker/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632037874 -SHA256 (rubygem/faker-2.19.0.gem) = fc139e962fb27b61fa7a4260f8baf204b633dbfb41e62ae8165a93e5bbe36941 -SIZE (rubygem/faker-2.19.0.gem) = 1494016 +TIMESTAMP = 
1647264816 +SHA256 (rubygem/faker-2.20.0.gem) = 913fb9b4241331a3ce7c24bf4b720e3092d4e728bff34518c148057064b32499 +SIZE (rubygem/faker-2.20.0.gem) = 1506304 diff --git a/devel/rubygem-fog-core/Makefile b/devel/rubygem-fog-core/Makefile index 0e4d0b037bc..42d8b4196cf 100644 --- a/devel/rubygem-fog-core/Makefile +++ b/devel/rubygem-fog-core/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= fog-core -PORTVERSION= 2.2.4 +PORTVERSION= 2.3.0 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -13,7 +13,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE.md RUN_DEPENDS= rubygem-builder>=0:devel/rubygem-builder \ rubygem-excon>=0.71<1:devel/rubygem-excon \ - rubygem-formatador>=0.2<1:devel/rubygem-formatador \ + rubygem-formatador>=0.2<2.0:devel/rubygem-formatador \ rubygem-mime-types>=0:misc/rubygem-mime-types USES= gem diff --git a/devel/rubygem-fog-core/distinfo b/devel/rubygem-fog-core/distinfo index a5a16e8b7d3..6afe076a9a1 100644 --- a/devel/rubygem-fog-core/distinfo +++ b/devel/rubygem-fog-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1619725425 -SHA256 (rubygem/fog-core-2.2.4.gem) = 0f03d08dcc7f03434c4d61d591dac6e45175d2f7262ac1fd56cd67ff6f99bf50 -SIZE (rubygem/fog-core-2.2.4.gem) = 55808 +TIMESTAMP = 1647264818 +SHA256 (rubygem/fog-core-2.3.0.gem) = bc2a33c3c301161cb6cb74ed42e8d6ea4b536a0a5ab0b7124cfdea8d7087a61a +SIZE (rubygem/fog-core-2.3.0.gem) = 55296 diff --git a/devel/rubygem-google-apis-compute_v1/Makefile b/devel/rubygem-google-apis-compute_v1/Makefile index fbe2dcda0f9..cc896b96c2e 100644 --- a/devel/rubygem-google-apis-compute_v1/Makefile +++ b/devel/rubygem-google-apis-compute_v1/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-apis-compute_v1 -PORTVERSION= 0.26.0 +PORTVERSION= 0.27.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-google-apis-compute_v1/distinfo b/devel/rubygem-google-apis-compute_v1/distinfo index 1dca6d51b62..052fb6d7bce 100644 --- a/devel/rubygem-google-apis-compute_v1/distinfo +++ 
b/devel/rubygem-google-apis-compute_v1/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058796 -SHA256 (rubygem/google-apis-compute_v1-0.26.0.gem) = 1c5db5801713789c23566c361e6494eb2c8cfd487a0747717d8ce38521763fb6 -SIZE (rubygem/google-apis-compute_v1-0.26.0.gem) = 344576 +TIMESTAMP = 1647264820 +SHA256 (rubygem/google-apis-compute_v1-0.27.0.gem) = ee4839e3f6e4e7a68a35b762984dd48c8a4e78feaf098bfdc891150211552f66 +SIZE (rubygem/google-apis-compute_v1-0.27.0.gem) = 356352 diff --git a/devel/rubygem-google-apis-monitoring_v3/Makefile b/devel/rubygem-google-apis-monitoring_v3/Makefile index dfe67c8846d..e515ec9b185 100644 --- a/devel/rubygem-google-apis-monitoring_v3/Makefile +++ b/devel/rubygem-google-apis-monitoring_v3/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-apis-monitoring_v3 -PORTVERSION= 0.22.0 +PORTVERSION= 0.23.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-google-apis-monitoring_v3/distinfo b/devel/rubygem-google-apis-monitoring_v3/distinfo index 1a55bc86b68..2b78df523b4 100644 --- a/devel/rubygem-google-apis-monitoring_v3/distinfo +++ b/devel/rubygem-google-apis-monitoring_v3/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058798 -SHA256 (rubygem/google-apis-monitoring_v3-0.22.0.gem) = a5bac83eb6e53039d8915a08b23102a1431fd60a40818c7c2c47f33363a47227 -SIZE (rubygem/google-apis-monitoring_v3-0.22.0.gem) = 69632 +TIMESTAMP = 1647264822 +SHA256 (rubygem/google-apis-monitoring_v3-0.23.0.gem) = b344db5c2283f022c8450d8db4fb4e53d74778d004115e1bebab2411de16fdfb +SIZE (rubygem/google-apis-monitoring_v3-0.23.0.gem) = 69632 diff --git a/devel/rubygem-google-apis-pubsub_v1/Makefile b/devel/rubygem-google-apis-pubsub_v1/Makefile index 8a6bfc7d2d7..b1c5b0b49cf 100644 --- a/devel/rubygem-google-apis-pubsub_v1/Makefile +++ b/devel/rubygem-google-apis-pubsub_v1/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-apis-pubsub_v1 -PORTVERSION= 0.12.0 +PORTVERSION= 0.13.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff 
--git a/devel/rubygem-google-apis-pubsub_v1/distinfo b/devel/rubygem-google-apis-pubsub_v1/distinfo index de77ab587fa..1758cfa53cb 100644 --- a/devel/rubygem-google-apis-pubsub_v1/distinfo +++ b/devel/rubygem-google-apis-pubsub_v1/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642102625 -SHA256 (rubygem/google-apis-pubsub_v1-0.12.0.gem) = e430a43c4a77cdaedd4b602abf9bbb97512b676c250ecfed0f785e421344856f -SIZE (rubygem/google-apis-pubsub_v1-0.12.0.gem) = 33792 +TIMESTAMP = 1647264824 +SHA256 (rubygem/google-apis-pubsub_v1-0.13.0.gem) = 97b93e55b4680670537a61dc2fff7af820144d2b98f8e641160566ed82bbd2a5 +SIZE (rubygem/google-apis-pubsub_v1-0.13.0.gem) = 33792 diff --git a/devel/rubygem-i18n/Makefile b/devel/rubygem-i18n/Makefile index 80c66b71295..1b4b72e968b 100644 --- a/devel/rubygem-i18n/Makefile +++ b/devel/rubygem-i18n/Makefile @@ -1,5 +1,5 @@ PORTNAME= i18n -PORTVERSION= 1.9.1 +PORTVERSION= 1.10.0 PORTEPOCH= 2 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-i18n/distinfo b/devel/rubygem-i18n/distinfo index f1dc8e0e22c..818c2aff52b 100644 --- a/devel/rubygem-i18n/distinfo +++ b/devel/rubygem-i18n/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971186 -SHA256 (rubygem/i18n-1.9.1.gem) = 886bb1d0d73f977a48aaf34360fab4bc392d6706faf679031b9387825f6356d9 -SIZE (rubygem/i18n-1.9.1.gem) = 42496 +TIMESTAMP = 1647264826 +SHA256 (rubygem/i18n-1.10.0.gem) = 0f4217161b9caa0da0ac814277862947e4d7cb2378567fc2a3d67b32edf6582c +SIZE (rubygem/i18n-1.10.0.gem) = 44544 diff --git a/devel/rubygem-inspec-core/Makefile b/devel/rubygem-inspec-core/Makefile index 9f9919f94a7..ff5ab4f69e2 100644 --- a/devel/rubygem-inspec-core/Makefile +++ b/devel/rubygem-inspec-core/Makefile @@ -1,8 +1,7 @@ # Created by: Matthias Fechner PORTNAME= inspec-core -PORTVERSION= 4.41.20 -PORTREVISION= 3 +PORTVERSION= 4.52.9 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-inspec-core/distinfo b/devel/rubygem-inspec-core/distinfo index a612d177d11..33ec270ff0e 100644 --- 
a/devel/rubygem-inspec-core/distinfo +++ b/devel/rubygem-inspec-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632664558 -SHA256 (rubygem/inspec-core-4.41.20.gem) = 81e48fbf755fe59842968af88225e851257b6aaf77afc63328e65b2ac23ee428 -SIZE (rubygem/inspec-core-4.41.20.gem) = 375808 +TIMESTAMP = 1647264996 +SHA256 (rubygem/inspec-core-4.52.9.gem) = 7d5dddc24f4dee1dc1b2a48f57941e89b126570067bee509c6a39b76da8b6fa1 +SIZE (rubygem/inspec-core-4.52.9.gem) = 385024 diff --git a/devel/rubygem-ipynbdiff/Makefile b/devel/rubygem-ipynbdiff/Makefile index 653dc61c32e..ed0c39a09b6 100644 --- a/devel/rubygem-ipynbdiff/Makefile +++ b/devel/rubygem-ipynbdiff/Makefile @@ -1,7 +1,7 @@ # Created by: Matthias Fechner PORTNAME= ipynbdiff -PORTVERSION= 0.3.8 +PORTVERSION= 0.4.4 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -10,8 +10,8 @@ COMMENT= Human readable Jupyter Notebook diffs LICENSE= MIT -RUN_DEPENDS= rubygem-diffy330>=3.3.0<3.3.1:textproc/rubygem-diffy330 \ - rubygem-json>=2.5.1<2.5.2:devel/rubygem-json +RUN_DEPENDS= rubygem-diffy>=3.3<4:textproc/rubygem-diffy \ + rubygem-json>=2.5.1<2.6:devel/rubygem-json USES= gem USE_RUBY= yes diff --git a/devel/rubygem-ipynbdiff/distinfo b/devel/rubygem-ipynbdiff/distinfo index a156a38201b..0b7582daea5 100644 --- a/devel/rubygem-ipynbdiff/distinfo +++ b/devel/rubygem-ipynbdiff/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1640030930 -SHA256 (rubygem/ipynbdiff-0.3.8.gem) = a1812441410e8ee3d2c3f123d1e5f14d5ef77cb90de3cb7eeee1377179275265 -SIZE (rubygem/ipynbdiff-0.3.8.gem) = 8192 +TIMESTAMP = 1647945416 +SHA256 (rubygem/ipynbdiff-0.4.4.gem) = a24b413e1c7871118e6a5bed9460bbf948f8f0185a8a69b47a2178c0eec8f931 +SIZE (rubygem/ipynbdiff-0.4.4.gem) = 10752 diff --git a/devel/rubygem-mongo/files/patch-gemspec b/devel/rubygem-mongo/files/patch-gemspec index 6dfaa969b2a..448292bf417 100644 --- a/devel/rubygem-mongo/files/patch-gemspec +++ b/devel/rubygem-mongo/files/patch-gemspec @@ -9,7 +9,7 @@ s.homepage = "https://docs.mongodb.com/ruby-driver/".freeze s.licenses 
= ["Apache-2.0".freeze] s.required_ruby_version = Gem::Requirement.new(">= 2.4".freeze) - s.rubygems_version = "3.3.7".freeze + s.rubygems_version = "3.3.9".freeze s.summary = "Ruby driver for MongoDB".freeze - s.test_files = ["spec/kerberos/kerberos_spec.rb".freeze, "spec/stress/push_monitor_close_spec.rb".freeze, "spec/stress/fork_reconnect_stress_spec.rb".freeze, "spec/stress/connection_pool_timing_spec.rb".freeze, "spec/stress/cleanup_spec.rb".freeze, "spec/stress/connection_pool_stress_spec.rb".freeze, "spec/README.aws-auth.md".freeze, "spec/USERS.md".freeze, "spec/runners/connection_string.rb".freeze, "spec/runners/sdam/verifier.rb".freeze, "spec/runners/transactions/operation.rb".freeze, "spec/runners/transactions/test.rb".freeze, "spec/runners/transactions/spec.rb".freeze, "spec/runners/change_streams/test.rb".freeze, "spec/runners/change_streams/outcome.rb".freeze, "spec/runners/change_streams/spec.rb".freeze, "spec/runners/unified/support_operations.rb".freeze, "spec/runners/unified/test_group.rb".freeze, "spec/runners/unified/assertions.rb".freeze, "spec/runners/unified/crud_operations.rb".freeze, "spec/runners/unified/event_subscriber.rb".freeze, "spec/runners/unified/test.rb".freeze, "spec/runners/unified/exceptions.rb".freeze, "spec/runners/unified/error.rb".freeze, "spec/runners/unified/grid_fs_operations.rb".freeze, "spec/runners/unified/ddl_operations.rb".freeze, "spec/runners/unified/change_stream_operations.rb".freeze, "spec/runners/unified/entity_map.rb".freeze, "spec/runners/crud/operation.rb".freeze, "spec/runners/crud/test_base.rb".freeze, "spec/runners/crud/test.rb".freeze, "spec/runners/crud/outcome.rb".freeze, "spec/runners/crud/context.rb".freeze, "spec/runners/crud/requirement.rb".freeze, "spec/runners/crud/verifier.rb".freeze, "spec/runners/crud/spec.rb".freeze, "spec/runners/cmap.rb".freeze, "spec/runners/command_monitoring.rb".freeze, "spec/runners/gridfs.rb".freeze, "spec/runners/transactions.rb".freeze, 
"spec/runners/read_write_concern_document.rb".freeze, "spec/runners/server_selection_rtt.rb".freeze, "spec/runners/unified.rb".freeze, "spec/runners/auth.rb".freeze, "spec/runners/cmap/verifier.rb".freeze, "spec/runners/sdam.rb".freeze, "spec/runners/server_selection.rb".freeze, "spec/runners/crud.rb".freeze, "spec/integration/ocsp_connectivity_spec.rb".freeze, "spec/integration/bulk_write_error_message_spec.rb".freeze, "spec/integration/query_cache_spec.rb".freeze, "spec/integration/sdam_events_spec.rb".freeze, "spec/integration/operation_failure_message_spec.rb".freeze, "spec/integration/ssl_uri_options_spec.rb".freeze, "spec/integration/connection_pool_populator_spec.rb".freeze, "spec/integration/aws_credentials_retriever_spec.rb".freeze, "spec/integration/server_selector_spec.rb".freeze, "spec/integration/check_clean_slate_spec.rb".freeze, "spec/integration/client_authentication_options_spec.rb".freeze, "spec/integration/size_limit_spec.rb".freeze, "spec/integration/crud_spec.rb".freeze, "spec/integration/heartbeat_events_spec.rb".freeze, "spec/integration/client_spec.rb".freeze, "spec/integration/server_selection_spec.rb".freeze, "spec/integration/step_down_spec.rb".freeze, "spec/integration/retryable_writes_errors_spec.rb".freeze, "spec/integration/bulk_write_spec.rb".freeze, "spec/integration/transactions_api_examples_spec.rb".freeze, "spec/integration/change_stream_examples_spec.rb".freeze, "spec/integration/map_reduce_spec.rb".freeze, "spec/integration/snappy_compression_spec.rb".freeze, "spec/integration/sdam_prose_spec.rb".freeze, "spec/integration/server_description_spec.rb".freeze, "spec/integration/client_connectivity_spec.rb".freeze, "spec/integration/versioned_api_examples_spec.rb".freeze, "spec/integration/auth_spec.rb".freeze, "spec/integration/command_monitoring_spec.rb".freeze, "spec/integration/server_spec.rb".freeze, "spec/integration/shell_examples_spec.rb".freeze, 
"spec/integration/retryable_writes/retryable_writes_40_and_newer_spec.rb".freeze, "spec/integration/retryable_writes/retryable_writes_36_and_older_spec.rb".freeze, "spec/integration/retryable_writes/shared/does_not_support_retries.rb".freeze, "spec/integration/retryable_writes/shared/performs_modern_retries.rb".freeze, "spec/integration/retryable_writes/shared/supports_retries.rb".freeze, "spec/integration/retryable_writes/shared/supports_legacy_retries.rb".freeze, "spec/integration/retryable_writes/shared/performs_legacy_retries.rb".freeze, "spec/integration/retryable_writes/shared/supports_modern_retries.rb".freeze, "spec/integration/retryable_writes/shared/only_supports_legacy_retries.rb".freeze, "spec/integration/retryable_writes/shared/adds_diagnostics.rb".freeze, "spec/integration/retryable_writes/shared/performs_no_retries.rb".freeze, "spec/integration/reconnect_spec.rb".freeze, "spec/integration/secondary_reads_spec.rb".freeze, "spec/integration/x509_auth_spec.rb".freeze, "spec/integration/grid_fs_bucket_spec.rb".freeze, "spec/integration/truncated_utf8_spec.rb".freeze, "spec/integration/bulk_insert_spec.rb".freeze, "spec/integration/transaction_pinning_spec.rb".freeze, "spec/integration/cursor_reaping_spec.rb".freeze, "spec/integration/zlib_compression_spec.rb".freeze, "spec/integration/ocsp_verifier_cache_spec.rb".freeze, "spec/integration/client_construction_spec.rb".freeze, "spec/integration/awaited_ismaster_spec.rb".freeze, "spec/integration/client_construction_aws_auth_spec.rb".freeze, "spec/integration/get_more_spec.rb".freeze, "spec/integration/srv_monitoring_spec.rb".freeze, "spec/integration/error_detection_spec.rb".freeze, "spec/integration/collection_indexes_prose_spec.rb".freeze, "spec/integration/transactions_examples_spec.rb".freeze, "spec/integration/connection_spec.rb".freeze, "spec/integration/read_preference_spec.rb".freeze, "spec/integration/change_stream_spec.rb".freeze, "spec/integration/connect_single_rs_name_spec.rb".freeze, 
"spec/integration/bson_symbol_spec.rb".freeze, "spec/integration/srv_spec.rb".freeze, "spec/integration/mongos_pinning_spec.rb".freeze, "spec/integration/aws_auth_request_spec.rb".freeze, "spec/integration/cursor_pinning_spec.rb".freeze, "spec/integration/ocsp_verifier_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_reconnect_spec.rb".freeze, "spec/integration/client_side_encryption/external_key_vault_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_mongocryptd_spawn_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_spec.rb".freeze, "spec/integration/client_side_encryption/bypass_mongocryptd_spawn_spec.rb".freeze, "spec/integration/client_side_encryption/client_close_spec.rb".freeze, "spec/integration/client_side_encryption/explicit_encryption_spec.rb".freeze, "spec/integration/client_side_encryption/custom_endpoint_spec.rb".freeze, "spec/integration/client_side_encryption/views_spec.rb".freeze, "spec/integration/client_side_encryption/bson_size_limit_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_command_monitoring_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_bulk_writes_spec.rb".freeze, "spec/integration/client_side_encryption/corpus_spec.rb".freeze, "spec/integration/client_side_encryption/data_key_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_old_wire_version_spec.rb".freeze, "spec/integration/query_cache_transactions_spec.rb".freeze, "spec/integration/command_spec.rb".freeze, "spec/integration/mmapv1_spec.rb".freeze, "spec/integration/time_zone_querying_spec.rb".freeze, "spec/integration/fork_reconnect_spec.rb".freeze, "spec/integration/docs_examples_spec.rb".freeze, "spec/integration/read_concern_spec.rb".freeze, "spec/integration/retryable_errors_spec.rb".freeze, "spec/integration/sdam_error_handling_spec.rb".freeze, "spec/integration/client_update_spec.rb".freeze, 
"spec/integration/operation_failure_code_spec.rb".freeze, "spec/integration/zstd_compression_spec.rb".freeze, "spec/integration/server_monitor_spec.rb".freeze, "spec/solo/clean_exit_spec.rb".freeze, "spec/shared/bin/s3-copy".freeze, "spec/shared/bin/s3-upload".freeze, "spec/shared/bin/get-mongodb-download-url".freeze, "spec/shared/lib/mrss/server_version_registry.rb".freeze, "spec/shared/lib/mrss/event_subscriber.rb".freeze, "spec/shared/lib/mrss/child_process_helper.rb".freeze, "spec/shared/lib/mrss/utils.rb".freeze, "spec/shared/lib/mrss/spec_organizer.rb".freeze, "spec/shared/lib/mrss/docker_runner.rb".freeze, "spec/shared/lib/mrss/constraints.rb".freeze, "spec/shared/lib/mrss/lite_constraints.rb".freeze, "spec/shared/lib/mrss/cluster_config.rb".freeze, "spec/shared/LICENSE".freeze, "spec/shared/shlib/set_env.sh".freeze, "spec/shared/shlib/server.sh".freeze, "spec/shared/shlib/distro.sh".freeze, "spec/shared/share/Dockerfile.erb".freeze, "spec/shared/share/haproxy-1.conf".freeze, "spec/shared/share/haproxy-2.conf".freeze, "spec/lite_spec_helper.rb".freeze, "spec/spec_tests/cmap_spec.rb".freeze, "spec/spec_tests/connection_string_spec.rb".freeze, "spec/spec_tests/seed_list_discovery_spec.rb".freeze, "spec/spec_tests/sdam_monitoring_spec.rb".freeze, "spec/spec_tests/sdam_integration_spec.rb".freeze, "spec/spec_tests/crud_spec.rb".freeze, "spec/spec_tests/server_selection_spec.rb".freeze, "spec/spec_tests/retryable_writes_spec.rb".freeze, "spec/spec_tests/load_balancers_spec.rb".freeze, "spec/spec_tests/auth_spec.rb".freeze, "spec/spec_tests/command_monitoring_spec.rb".freeze, "spec/spec_tests/read_write_concern_document_spec.rb".freeze, "spec/spec_tests/transactions_api_spec.rb".freeze, "spec/spec_tests/uri_options_spec.rb".freeze, "spec/spec_tests/crud_unified_spec.rb".freeze, "spec/spec_tests/versioned_api_spec.rb".freeze, "spec/spec_tests/gridfs_spec.rb".freeze, "spec/spec_tests/client_side_encryption_spec.rb".freeze, 
"spec/spec_tests/retryable_reads_spec.rb".freeze, "spec/spec_tests/max_staleness_spec.rb".freeze, "spec/spec_tests/sdam_spec.rb".freeze, "spec/spec_tests/read_write_concern_connection_string_spec.rb".freeze, "spec/spec_tests/command_monitoring_unified_spec.rb".freeze, "spec/spec_tests/read_write_concern_operaton_spec.rb".freeze, "spec/spec_tests/server_selection_rtt_spec.rb".freeze, "spec/spec_tests/change_streams_unified_spec.rb".freeze, "spec/spec_tests/collection_management_spec.rb".freeze, "spec/spec_tests/change_streams_spec.rb".freeze, "spec/spec_tests/transactions_unified_spec.rb".freeze, "spec/spec_tests/transactions_spec.rb".freeze, "spec/spec_tests/data/sdam_monitoring/load_balancer.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_me_mismatch.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone_repeated.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_second_seed_removal.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_primary_address_change.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_primary_and_secondary.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_other_change.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_other_chain.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_primary.yml".freeze, "spec/spec_tests/data/sdam_monitoring/discovered_standalone.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_removal.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone_suppress_equal_description_changes.yml".freeze, "spec/spec_tests/data/sdam_monitoring/required_replica_set.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_primary_removal.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_no_primary.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone_to_rs_with_me_mismatch.yml".freeze, 
"spec/spec_tests/data/server_selection_rtt/first_value.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_3.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_2.yml".freeze, "spec/spec_tests/data/server_selection_rtt/first_value_zero.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_1.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_5.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_4.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-network.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-NotPrimaryOrSecondary.yml".freeze, 
"spec/spec_tests/data/sdam/errors/post-42-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/prefer-error-code.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-network-timeout-error.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-InterruptedAtShutdown.yml".freeze, 
"spec/spec_tests/data/sdam/errors/stale-topologyVersion-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-timeout.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-LegacyNotPrimary.yml".freeze, 
"spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/error_handling_handshake.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-network.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-network-error.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-timeout.yml".freeze, "spec/spec_tests/data/sdam/errors/write_errors_ignored.yml".freeze, 
"spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_rsarbiter.yml".freeze, "spec/spec_tests/data/sdam/single/too_old.yml".freeze, "spec/spec_tests/data/sdam/single/too_old_then_upgraded.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_mongos.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_rsprimary.yml".freeze, "spec/spec_tests/data/sdam/single/standalone_using_legacy_hello.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_replicaset.yml".freeze, "spec/spec_tests/data/sdam/single/discover_unavailable_seed.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_unavailable_seed.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_standalone.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_rssecondary.yml".freeze, "spec/spec_tests/data/sdam/single/ruby_primary_different_address.yml".freeze, "spec/spec_tests/data/sdam/single/too_new.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_external_ip.yml".freeze, "spec/spec_tests/data/sdam/single/compatible.yml".freeze, "spec/spec_tests/data/sdam/single/standalone_removed.yml".freeze, "spec/spec_tests/data/sdam/single/not_ok_response.yml".freeze, "spec/spec_tests/data/sdam/single/ruby_primary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/single/discover_standalone.yml".freeze, "spec/spec_tests/data/sdam/single/ls_timeout_standalone.yml".freeze, "spec/spec_tests/data/sdam/load-balanced/discover_load_balancer.yml".freeze, "spec/spec_tests/data/sdam/rs/ls_timeout.yml".freeze, "spec/spec_tests/data/sdam/rs/incompatible_other.yml".freeze, 
"spec/spec_tests/data/sdam/rs/hosts_differ_from_seeds.yml".freeze, "spec/spec_tests/data/sdam/rs/ruby_secondary_wrong_set_name_with_primary_second.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_wrong_set_name_with_primary.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_to_no_primary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/rs/non_rs_member.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_secondary_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_primary.yml".freeze, "spec/spec_tests/data/sdam/rs/too_old.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_ghost_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/topology_version_greater.yml".freeze, "spec/spec_tests/data/sdam/rs/normalize_case_me.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_changes_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_secondary.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_rsother_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_mismatched_me_not_removed.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_rsother.yml".freeze, "spec/spec_tests/data/sdam/rs/response_from_removed.yml".freeze, "spec/spec_tests/data/sdam/rs/null_election_id.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_disconnect_electionid.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_disconnect.yml".freeze, "spec/spec_tests/data/sdam/rs/new_primary_new_electionid.yml".freeze, "spec/spec_tests/data/sdam/rs/normalize_case.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_reports_new_member.yml".freeze, "spec/spec_tests/data/sdam/rs/new_primary.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_arbiters_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/topology_version_less.yml".freeze, "spec/spec_tests/data/sdam/rs/sec_not_auth.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/rs/discovery.yml".freeze, "spec/spec_tests/data/sdam/rs/use_setversion_without_electionid.yml".freeze, 
"spec/spec_tests/data/sdam/rs/primary_hint_from_secondary_with_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_primary_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_becomes_standalone.yml".freeze, "spec/spec_tests/data/sdam/rs/setversion_without_electionid.yml".freeze, "spec/spec_tests/data/sdam/rs/unexpected_mongos.yml".freeze, "spec/spec_tests/data/sdam/rs/member_reconfig.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_disconnect_setversion.yml".freeze, "spec/spec_tests/data/sdam/rs/new_primary_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/repeated.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_arbiters.yml".freeze, "spec/spec_tests/data/sdam/rs/topology_version_equal.yml".freeze, "spec/spec_tests/data/sdam/rs/incompatible_arbiter.yml".freeze, "spec/spec_tests/data/sdam/rs/compatible_unknown.yml".freeze, "spec/spec_tests/data/sdam/rs/too_new.yml".freeze, "spec/spec_tests/data/sdam/rs/ruby_primary_address_change.yml".freeze, "spec/spec_tests/data/sdam/rs/incompatible_ghost.yml".freeze, "spec/spec_tests/data/sdam/rs/wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/new_primary_new_setversion.yml".freeze, "spec/spec_tests/data/sdam/rs/member_standalone.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_becomes_mongos.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_hidden_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/rs/compatible.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_ignore_ok_0.yml".freeze, "spec/spec_tests/data/sdam/rs/replicaset_rsnp.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_ghost.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_passives_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/stepdown_change_set_name.yml".freeze, 
"spec/spec_tests/data/sdam/rs/primary_becomes_ghost.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_hidden.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_passives.yml".freeze, "spec/spec_tests/data/sdam/rs/equal_electionids.yml".freeze, "spec/spec_tests/data/sdam/sharded/non_mongos_removed.yml".freeze, "spec/spec_tests/data/sdam/sharded/multiple_mongoses.yml".freeze, "spec/spec_tests/data/sdam/sharded/too_old.yml".freeze, "spec/spec_tests/data/sdam/sharded/mongos_disconnect.yml".freeze, "spec/spec_tests/data/sdam/sharded/discover_single_mongos.yml".freeze, "spec/spec_tests/data/sdam/sharded/ls_timeout_mongos.yml".freeze, "spec/spec_tests/data/sdam/sharded/ruby_primary_different_address.yml".freeze, "spec/spec_tests/data/sdam/sharded/too_new.yml".freeze, "spec/spec_tests/data/sdam/sharded/compatible.yml".freeze, "spec/spec_tests/data/sdam/sharded/ruby_primary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/sharded/normalize_uri_case.yml".freeze, "spec/spec_tests/data/auth/connection-string.yml".freeze, "spec/spec_tests/data/transactions/errors.yml".freeze, "spec/spec_tests/data/transactions/insert.yml".freeze, "spec/spec_tests/data/transactions/error-labels.yml".freeze, "spec/spec_tests/data/transactions/delete.yml".freeze, "spec/spec_tests/data/transactions/mongos-pin-auto.yml".freeze, "spec/spec_tests/data/transactions/write-concern.yml".freeze, "spec/spec_tests/data/transactions/run-command.yml".freeze, "spec/spec_tests/data/transactions/pin-mongos.yml".freeze, "spec/spec_tests/data/transactions/update.yml".freeze, "spec/spec_tests/data/transactions/errors-client.yml".freeze, "spec/spec_tests/data/transactions/mongos-recovery-token.yml".freeze, "spec/spec_tests/data/transactions/findOneAndDelete.yml".freeze, "spec/spec_tests/data/transactions/transaction-options-repl.yml".freeze, "spec/spec_tests/data/transactions/findOneAndReplace.yml".freeze, "spec/spec_tests/data/transactions/bulk.yml".freeze, 
"spec/spec_tests/data/transactions/create-index.yml".freeze, "spec/spec_tests/data/transactions/transaction-options.yml".freeze, "spec/spec_tests/data/transactions/isolation.yml".freeze, "spec/spec_tests/data/transactions/retryable-commit.yml".freeze, "spec/spec_tests/data/transactions/read-pref.yml".freeze, "spec/spec_tests/data/transactions/reads.yml".freeze, "spec/spec_tests/data/transactions/retryable-abort-errorLabels.yml".freeze, "spec/spec_tests/data/transactions/abort.yml".freeze, "spec/spec_tests/data/transactions/count.yml".freeze, "spec/spec_tests/data/transactions/retryable-commit-errorLabels.yml".freeze, "spec/spec_tests/data/transactions/commit.yml".freeze, "spec/spec_tests/data/transactions/causal-consistency.yml".freeze, "spec/spec_tests/data/transactions/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/transactions/retryable-abort.yml".freeze, "spec/spec_tests/data/transactions/retryable-writes.yml".freeze, "spec/spec_tests/data/transactions/create-collection.yml".freeze, "spec/spec_tests/data/transactions/read-concern.yml".freeze, "spec/spec_tests/data/connection_string/valid-host_identifiers.yml".freeze, "spec/spec_tests/data/connection_string/valid-warnings.yml".freeze, "spec/spec_tests/data/connection_string/invalid-uris.yml".freeze, "spec/spec_tests/data/connection_string/valid-db-with-dotted-name.yml".freeze, "spec/spec_tests/data/connection_string/valid-unix_socket-absolute.yml".freeze, "spec/spec_tests/data/connection_string/valid-options.yml".freeze, "spec/spec_tests/data/connection_string/valid-auth.yml".freeze, "spec/spec_tests/data/connection_string/valid-unix_socket-relative.yml".freeze, "spec/spec_tests/data/change_streams/change-streams-errors.yml".freeze, "spec/spec_tests/data/change_streams/change-streams-resume-errorLabels.yml".freeze, "spec/spec_tests/data/change_streams/change-streams.yml".freeze, "spec/spec_tests/data/change_streams/change-streams-resume-allowlist.yml".freeze, 
"spec/spec_tests/data/load_balancers/server-selection.yml".freeze, "spec/spec_tests/data/load_balancers/non-lb-connection-establishment.yml".freeze, "spec/spec_tests/data/load_balancers/event-monitoring.yml".freeze, "spec/spec_tests/data/load_balancers/lb-connection-establishment.yml".freeze, "spec/spec_tests/data/unified/valid-fail/operation-failure.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-retryable-writes.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-gridfs.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-transactions-mongos-pin-auto.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-transactions-convenient-api.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-transactions.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-sessions.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-change-streams.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-retryable-reads.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-command-monitoring.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-crud.yml".freeze, "spec/spec_tests/data/retryable_writes/insertOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/insertMany-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndDelete-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/insertOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteOne.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndUpdate-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/bulkWrite.yml".freeze, "spec/spec_tests/data/retryable_writes/insertOne.yml".freeze, "spec/spec_tests/data/retryable_writes/updateOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/updateOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndDelete-errorLabels.yml".freeze, 
"spec/spec_tests/data/retryable_writes/findOneAndDelete.yml".freeze, "spec/spec_tests/data/retryable_writes/updateOne.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndReplace.yml".freeze, "spec/spec_tests/data/retryable_writes/replaceOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/insertMany-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndUpdate-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/replaceOne.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/bulkWrite-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/insertMany.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndReplace-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/replaceOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/retryable_writes/bulkWrite-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndReplace-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/updateMany.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteMany.yml".freeze, "spec/spec_tests/data/max_staleness/Unknown/SmallMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Nearest.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/ZeroMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/MaxStalenessWithModePrimary.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/LastUpdateTime.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Nearest_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Nearest2.yml".freeze, 
"spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/MaxStalenessTooSmall.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Secondary_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Secondary_tags2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/LongHeartbeat2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/DefaultNoMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/LongHeartbeat.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/PrimaryPreferred_incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Nearest.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/ZeroMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/NoKnownServers.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/PrimaryPreferred_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/LastUpdateTime.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Nearest2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/MaxStalenessTooSmall.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/SecondaryPreferred_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Secondary.yml".freeze, 
"spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/DefaultNoMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/Single/SmallMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/Single/Incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/Sharded/SmallMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/Sharded/Incompatible.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabases.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionNames-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.coll.watch-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/findOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/aggregate.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-serverErrors-pre4.9.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseObjects-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexes.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabases-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.coll.watch.yml".freeze, "spec/spec_tests/data/retryable_reads/count-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-client.watch.yml".freeze, "spec/spec_tests/data/retryable_reads/countDocuments.yml".freeze, "spec/spec_tests/data/retryable_reads/gridfs-downloadByName.yml".freeze, "spec/spec_tests/data/retryable_reads/gridfs-downloadByName-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/countDocuments-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexes-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.watch-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexNames.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseNames.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseNames-serverErrors.yml".freeze, 
"spec/spec_tests/data/retryable_reads/gridfs-download-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/distinct-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionNames.yml".freeze, "spec/spec_tests/data/retryable_reads/findOne.yml".freeze, "spec/spec_tests/data/retryable_reads/mapReduce.yml".freeze, "spec/spec_tests/data/retryable_reads/distinct.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.watch.yml".freeze, "spec/spec_tests/data/retryable_reads/aggregate-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseObjects.yml".freeze, "spec/spec_tests/data/retryable_reads/count.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-pre4.9.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionObjects.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-client.watch-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionObjects-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/aggregate-merge.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollections.yml".freeze, "spec/spec_tests/data/retryable_reads/gridfs-download.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexNames-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/find.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-serverErrors-4.9.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-4.9.yml".freeze, "spec/spec_tests/data/retryable_reads/find-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollections-serverErrors.yml".freeze, "spec/spec_tests/data/transactions_unified/mongos-unpin.yml".freeze, "spec/spec_tests/data/versioned_api/crud-api-version-1-strict.yml".freeze, "spec/spec_tests/data/versioned_api/test-commands-strict-mode.yml".freeze, "spec/spec_tests/data/versioned_api/transaction-handling.yml".freeze, 
"spec/spec_tests/data/versioned_api/runcommand-helper-no-api-version-declared.yml".freeze, "spec/spec_tests/data/versioned_api/crud-api-version-1.yml".freeze, "spec/spec_tests/data/versioned_api/test-commands-deprecation-errors.yml".freeze, "spec/spec_tests/data/server_selection/Unknown/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Nearest.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Nearest_multiple.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Secondary_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_tags.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Secondary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Nearest_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Primary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Nearest.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Nearest_multiple.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary_non_matching.yml".freeze, 
"spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags2.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PossiblePrimaryNearest.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Nearest_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PossiblePrimary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Primary.yml".freeze, "spec/spec_tests/data/server_selection/Single/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/Nearest.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/Secondary.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/Primary.yml".freeze, "spec/spec_tests/data/command_monitoring_unified/redacted-commands.yml".freeze, "spec/spec_tests/data/uri_options/auth-options.yml".freeze, "spec/spec_tests/data/uri_options/read-preference-options.yml".freeze, "spec/spec_tests/data/uri_options/ruby-connection-options.yml".freeze, "spec/spec_tests/data/uri_options/tls-options.yml".freeze, "spec/spec_tests/data/uri_options/connection-options.yml".freeze, "spec/spec_tests/data/uri_options/ruby-auth-options.yml".freeze, "spec/spec_tests/data/uri_options/connection-pool-options.yml".freeze, "spec/spec_tests/data/uri_options/compression-options.yml".freeze, "spec/spec_tests/data/uri_options/concern-options.yml".freeze, "spec/spec_tests/data/crud/read/aggregate-out.yml".freeze, 
"spec/spec_tests/data/crud/read/aggregate.yml".freeze, "spec/spec_tests/data/crud/read/aggregate-collation.yml".freeze, "spec/spec_tests/data/crud/read/distinct-collation.yml".freeze, "spec/spec_tests/data/crud/read/distinct.yml".freeze, "spec/spec_tests/data/crud/read/count.yml".freeze, "spec/spec_tests/data/crud/read/count-collation.yml".freeze, "spec/spec_tests/data/crud/read/find.yml".freeze, "spec/spec_tests/data/crud/read/find-collation.yml".freeze, "spec/spec_tests/data/crud/read/count-empty.yml".freeze, "spec/spec_tests/data/crud/write/updateOne-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndReplace-upsert.yml".freeze, "spec/spec_tests/data/crud/write/deleteOne.yml".freeze, "spec/spec_tests/data/crud/write/deleteOne-collation.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndUpdate-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/bulkWrite-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/bulkWrite.yml".freeze, "spec/spec_tests/data/crud/write/updateMany-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndUpdate-collation.yml".freeze, "spec/spec_tests/data/crud/write/insertOne.yml".freeze, "spec/spec_tests/data/crud/write/replaceOne-collation.yml".freeze, "spec/spec_tests/data/crud/write/updateOne-collation.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndDelete.yml".freeze, "spec/spec_tests/data/crud/write/updateOne.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndReplace.yml".freeze, "spec/spec_tests/data/crud/write/updateMany-collation.yml".freeze, "spec/spec_tests/data/crud/write/replaceOne.yml".freeze, "spec/spec_tests/data/crud/write/updateOne-pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/bulkWrite-collation.yml".freeze, "spec/spec_tests/data/crud/write/insertMany.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndReplace-collation.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndDelete-collation.yml".freeze, 
"spec/spec_tests/data/crud/write/replaceOne-upsert.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/crud/write/updateMany-pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/updateMany.yml".freeze, "spec/spec_tests/data/crud/write/deleteMany-collation.yml".freeze, "spec/spec_tests/data/crud/write/replaceOne-pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndReplace-upsert_pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/deleteMany.yml".freeze, "spec/spec_tests/data/collection_management/timeseries-collection.yml".freeze, "spec/spec_tests/data/sdam_integration/insert-network-error.yml".freeze, "spec/spec_tests/data/sdam_integration/find-network-error.yml".freeze, "spec/spec_tests/data/sdam_integration/hello-command-error.yml".freeze, "spec/spec_tests/data/sdam_integration/connectTimeoutMS.yml".freeze, "spec/spec_tests/data/sdam_integration/find-shutdown-error.yml".freeze, "spec/spec_tests/data/sdam_integration/insert-shutdown-error.yml".freeze, "spec/spec_tests/data/sdam_integration/hello-timeout.yml".freeze, "spec/spec_tests/data/sdam_integration/cancel-server-check.yml".freeze, "spec/spec_tests/data/sdam_integration/hello-network-error.yml".freeze, "spec/spec_tests/data/sdam_integration/rediscover-quickly-after-step-down.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-port.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-with-overridden-ssl-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-not-allowed-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/one-result-default-port.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/misformatted-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-admin-database.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch2.yml".freeze, 
"spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch4.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/two-results-nonstandard-port.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-with-unallowed-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/one-txt-record-multiple-strings.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/two-results-default-port.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/longer-parent-in-return.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/encoded-userinfo-and-db.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch5.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch1.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/returned-parent-wrong.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/direct-connection-false.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-two-hosts.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/direct-connection-true.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/not-enough-parts.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/loadBalanced-false-txt.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/no-results.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/one-txt-record.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/returned-parent-too-short.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-auth.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/two-txt-records.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch3.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-with-overridden-uri-option.yml".freeze, 
"spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-true-txt.yml".freeze, "spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-true-multiple-hosts.yml".freeze, "spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-replicaSet-errors.yml".freeze, "spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-directConnection.yml".freeze, "spec/spec_tests/data/transactions_api/callback-commits.yml".freeze, "spec/spec_tests/data/transactions_api/callback-aborts.yml".freeze, "spec/spec_tests/data/transactions_api/commit-retry.yml".freeze, "spec/spec_tests/data/transactions_api/commit-transienttransactionerror.yml".freeze, "spec/spec_tests/data/transactions_api/transaction-options.yml".freeze, "spec/spec_tests/data/transactions_api/callback-retry.yml".freeze, "spec/spec_tests/data/transactions_api/commit-writeconcernerror.yml".freeze, "spec/spec_tests/data/transactions_api/commit-transienttransactionerror-4.2.yml".freeze, "spec/spec_tests/data/transactions_api/commit.yml".freeze, "spec/spec_tests/data/client_side_encryption/bypassAutoEncryption.yml".freeze, "spec/spec_tests/data/client_side_encryption/aggregate.yml".freeze, "spec/spec_tests/data/client_side_encryption/malformedCiphertext.yml".freeze, "spec/spec_tests/data/client_side_encryption/insert.yml".freeze, "spec/spec_tests/data/client_side_encryption/types.yml".freeze, "spec/spec_tests/data/client_side_encryption/getMore.yml".freeze, "spec/spec_tests/data/client_side_encryption/delete.yml".freeze, "spec/spec_tests/data/client_side_encryption/maxWireVersion.yml".freeze, "spec/spec_tests/data/client_side_encryption/localSchema.yml".freeze, "spec/spec_tests/data/client_side_encryption/countDocuments.yml".freeze, "spec/spec_tests/data/client_side_encryption/badQueries.yml".freeze, "spec/spec_tests/data/client_side_encryption/bypassedCommand.yml".freeze, "spec/spec_tests/data/client_side_encryption/findOneAndDelete.yml".freeze, 
"spec/spec_tests/data/client_side_encryption/updateOne.yml".freeze, "spec/spec_tests/data/client_side_encryption/missingKey.yml".freeze, "spec/spec_tests/data/client_side_encryption/findOneAndReplace.yml".freeze, "spec/spec_tests/data/client_side_encryption/bulk.yml".freeze, "spec/spec_tests/data/client_side_encryption/keyAltName.yml".freeze, "spec/spec_tests/data/client_side_encryption/explain.yml".freeze, "spec/spec_tests/data/client_side_encryption/replaceOne.yml".freeze, "spec/spec_tests/data/client_side_encryption/distinct.yml".freeze, "spec/spec_tests/data/client_side_encryption/count.yml".freeze, "spec/spec_tests/data/client_side_encryption/badSchema.yml".freeze, "spec/spec_tests/data/client_side_encryption/localKMS.yml".freeze, "spec/spec_tests/data/client_side_encryption/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/client_side_encryption/unsupportedCommand.yml".freeze, "spec/spec_tests/data/client_side_encryption/updateMany.yml".freeze, "spec/spec_tests/data/client_side_encryption/find.yml".freeze, "spec/spec_tests/data/client_side_encryption/basic.yml".freeze, "spec/spec_tests/data/crud_unified/updateWithPipelines.yml".freeze, "spec/spec_tests/data/crud_unified/estimatedDocumentCount.yml".freeze, "spec/spec_tests/data/command_monitoring/deleteOne.yml".freeze, "spec/spec_tests/data/command_monitoring/unacknowledgedBulkWrite.yml".freeze, "spec/spec_tests/data/command_monitoring/bulkWrite.yml".freeze, "spec/spec_tests/data/command_monitoring/insertOne.yml".freeze, "spec/spec_tests/data/command_monitoring/updateOne.yml".freeze, "spec/spec_tests/data/command_monitoring/command.yml".freeze, "spec/spec_tests/data/command_monitoring/insertMany.yml".freeze, "spec/spec_tests/data/command_monitoring/updateMany.yml".freeze, "spec/spec_tests/data/command_monitoring/find.yml".freeze, "spec/spec_tests/data/command_monitoring/deleteMany.yml".freeze, "spec/spec_tests/data/cmap/pool-create-with-options.yml".freeze, "spec/spec_tests/data/cmap/pool-close.yml".freeze, 
"spec/spec_tests/data/cmap/pool-checkin-make-available.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-no-stale.yml".freeze, "spec/spec_tests/data/cmap/wait-queue-timeout.yml".freeze, "spec/spec_tests/data/cmap/pool-create.yml".freeze, "spec/spec_tests/data/cmap/pool-checkin.yml".freeze, "spec/spec_tests/data/cmap/pool-create-min-size.yml".freeze, "spec/spec_tests/data/cmap/wait-queue-fairness.yml".freeze, "spec/spec_tests/data/cmap/connection-must-have-id.yml".freeze, "spec/spec_tests/data/cmap/pool-checkin-destroy-closed.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-multiple.yml".freeze, "spec/spec_tests/data/cmap/pool-checkin-destroy-stale.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-error-closed.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-connection.yml".freeze, "spec/spec_tests/data/cmap/connection-must-order-ids.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-no-idle.yml".freeze, "spec/spec_tests/data/cmap/pool-create-max-size.yml".freeze, "spec/spec_tests/data/cmap/pool-close-destroy-conns.yml".freeze, "spec/spec_tests/data/change_streams_unified/change-streams.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-findOneAndDelete-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndUpdate-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-replaceOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteMany-hint.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-update-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-delete-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/aggregate-out-readConcern.yml".freeze, "spec/spec_tests/data/crud_v2/updateMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateOne-hint.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-deleteOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-arrayFilters.yml".freeze, 
"spec/spec_tests/data/crud_v2/unacknowledged-bulkWrite-delete-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateMany-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/find-allowdiskuse-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-updateMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-findOneAndUpdate-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndReplace-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteOne-hint.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-update-hint.yml".freeze, "spec/spec_tests/data/crud_v2/deleteMany-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-delete-hint.yml".freeze, "spec/spec_tests/data/crud_v2/db-aggregate.yml".freeze, "spec/spec_tests/data/crud_v2/updateOne-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/replaceOne-hint.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndDelete-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateWithPipelines.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-update-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-delete-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/find-allowdiskuse.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-updateOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndUpdate-hint.yml".freeze, "spec/spec_tests/data/crud_v2/find-allowdiskuse-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-deleteMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndDelete-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/aggregate-merge.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndReplace-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/updateOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-findOneAndReplace-hint-clientError.yml".freeze, 
"spec/spec_tests/data/crud_v2/deleteOne-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-bulkWrite-update-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndReplace-hint.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndDelete-hint.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndUpdate-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateMany-hint.yml".freeze, "spec/spec_tests/data/gridfs/download.yml".freeze, "spec/spec_tests/data/gridfs/delete.yml".freeze, "spec/spec_tests/data/gridfs/upload.yml".freeze, "spec/spec_tests/data/gridfs/download_by_name.yml".freeze, "spec/spec_tests/data/read_write_concern/connection-string/write-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/connection-string/read-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/document/write-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/document/read-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/operation/default-write-concern-2.6.yml".freeze, "spec/spec_tests/data/read_write_concern/operation/default-write-concern-4.2.yml".freeze, "spec/spec_tests/data/read_write_concern/operation/default-write-concern-3.2.yml".freeze, "spec/spec_tests/data/read_write_concern/operation/default-write-concern-3.4.yml".freeze, "spec/spec_tests/unified_spec.rb".freeze, "spec/mongo/query_cache_spec.rb".freeze, "spec/mongo/cluster/cursor_reaper_spec.rb".freeze, "spec/mongo/cluster/topology_spec.rb".freeze, "spec/mongo/cluster/socket_reaper_spec.rb".freeze, "spec/mongo/cluster/topology/unknown_spec.rb".freeze, "spec/mongo/cluster/topology/sharded_spec.rb".freeze, "spec/mongo/cluster/topology/single_spec.rb".freeze, "spec/mongo/cluster/topology/replica_set_spec.rb".freeze, "spec/mongo/cluster/periodic_executor_spec.rb".freeze, 
"spec/mongo/logger_spec.rb".freeze, "spec/mongo/cursor_spec.rb".freeze, "spec/mongo/id_spec.rb".freeze, "spec/mongo/server_selector_spec.rb".freeze, "spec/mongo/auth/stringprep_spec.rb".freeze, "spec/mongo/auth/ldap/conversation_spec.rb".freeze, "spec/mongo/auth/stringprep/profiles/sasl_spec.rb".freeze, "spec/mongo/auth/gssapi/conversation_spec.rb".freeze, "spec/mongo/auth/x509_spec.rb".freeze, "spec/mongo/auth/cr_spec.rb".freeze, "spec/mongo/auth/ldap_spec.rb".freeze, "spec/mongo/auth/scram_negotiation_spec.rb".freeze, "spec/mongo/auth/scram256/conversation_spec.rb".freeze, "spec/mongo/auth/user_spec.rb".freeze, "spec/mongo/auth/invalid_mechanism_spec.rb".freeze, "spec/mongo/auth/scram/conversation_spec.rb".freeze, "spec/mongo/auth/scram_spec.rb".freeze, "spec/mongo/auth/x509/conversation_spec.rb".freeze, "spec/mongo/auth/user/view_spec.rb".freeze, "spec/mongo/auth/aws/request_region_spec.rb".freeze, "spec/mongo/auth/aws/request_spec.rb".freeze, "spec/mongo/distinguishing_semaphore_spec.rb".freeze, "spec/mongo/grid/stream/write_spec.rb".freeze, "spec/mongo/grid/stream/read_spec.rb".freeze, "spec/mongo/grid/file_spec.rb".freeze, "spec/mongo/grid/fs_bucket_spec.rb".freeze, "spec/mongo/grid/stream_spec.rb".freeze, "spec/mongo/grid/file/info_spec.rb".freeze, "spec/mongo/grid/file/chunk_spec.rb".freeze, "spec/mongo/tls_context_hooks_spec.rb".freeze, "spec/mongo/options/redacted_spec.rb".freeze, "spec/mongo/client_spec.rb".freeze, "spec/mongo/lint_spec.rb".freeze, "spec/mongo/bulk_write_spec.rb".freeze, "spec/mongo/bulk_write/unordered_combiner_spec.rb".freeze, "spec/mongo/bulk_write/result_spec.rb".freeze, "spec/mongo/bulk_write/ordered_combiner_spec.rb".freeze, "spec/mongo/write_concern/unacknowledged_spec.rb".freeze, "spec/mongo/write_concern/acknowledged_spec.rb".freeze, "spec/mongo/dbref_spec.rb".freeze, "spec/mongo/auth_spec.rb".freeze, "spec/mongo/server_spec.rb".freeze, "spec/mongo/caching_cursor_spec.rb".freeze, "spec/mongo/uri/srv_protocol_spec.rb".freeze, 
"spec/mongo/cluster_time_spec.rb".freeze, "spec/mongo/session_transaction_spec.rb".freeze, "spec/mongo/query_cache_middleware_spec.rb".freeze, "spec/mongo/client_construction_spec.rb".freeze, "spec/mongo/address/ipv6_spec.rb".freeze, "spec/mongo/address/validator_spec.rb".freeze, "spec/mongo/address/ipv4_spec.rb".freeze, "spec/mongo/address/unix_spec.rb".freeze, "spec/mongo/operation/indexes_spec.rb".freeze, "spec/mongo/operation/limited_spec.rb".freeze, "spec/mongo/operation/aggregate_spec.rb".freeze, "spec/mongo/operation/insert/bulk_spec.rb".freeze, "spec/mongo/operation/insert/op_msg_spec.rb".freeze, "spec/mongo/operation/insert/command_spec.rb".freeze, "spec/mongo/operation/delete_spec.rb".freeze, "spec/mongo/operation/update_spec.rb".freeze, "spec/mongo/operation/specifiable_spec.rb".freeze, "spec/mongo/operation/map_reduce_spec.rb".freeze, "spec/mongo/operation/result_spec.rb".freeze, "spec/mongo/operation/kill_cursors_spec.rb".freeze, "spec/mongo/operation/aggregate/result_spec.rb".freeze, "spec/mongo/operation/collections_info_spec.rb".freeze, "spec/mongo/operation/create_index_spec.rb".freeze, "spec/mongo/operation/update/bulk_spec.rb".freeze, "spec/mongo/operation/update/op_msg_spec.rb".freeze, "spec/mongo/operation/update/command_spec.rb".freeze, "spec/mongo/operation/find/builder/modifiers_spec.rb".freeze, "spec/mongo/operation/find/builder/flags_spec.rb".freeze, "spec/mongo/operation/find/legacy_spec.rb".freeze, "spec/mongo/operation/read_preference_op_msg_spec.rb".freeze, "spec/mongo/operation/get_more_spec.rb".freeze, "spec/mongo/operation/create_user_spec.rb".freeze, "spec/mongo/operation/remove_user_spec.rb".freeze, "spec/mongo/operation/command_spec.rb".freeze, "spec/mongo/operation/update_user_spec.rb".freeze, "spec/mongo/operation/delete/bulk_spec.rb".freeze, "spec/mongo/operation/delete/op_msg_spec.rb".freeze, "spec/mongo/operation/delete/command_spec.rb".freeze, "spec/mongo/operation/read_preference_legacy_spec.rb".freeze, 
"spec/mongo/operation/drop_index_spec.rb".freeze, "spec/mongo/operation/insert_spec.rb".freeze, "spec/mongo/server/monitor_spec.rb".freeze, "spec/mongo/server/connection_auth_spec.rb".freeze, "spec/mongo/server/app_metadata_spec.rb".freeze, "spec/mongo/server/monitor/app_metadata_spec.rb".freeze, "spec/mongo/server/monitor/connection_spec.rb".freeze, "spec/mongo/server/description_spec.rb".freeze, "spec/mongo/server/round_trip_time_averager_spec.rb".freeze, "spec/mongo/server/description_query_methods_spec.rb".freeze, "spec/mongo/server/description/features_spec.rb".freeze, "spec/mongo/server/connection_common_spec.rb".freeze, "spec/mongo/server/connection_pool/populator_spec.rb".freeze, "spec/mongo/server/connection_spec.rb".freeze, "spec/mongo/server/connection_pool_spec.rb".freeze, "spec/mongo/error/notable_spec.rb".freeze, "spec/mongo/error/operation_failure_heavy_spec.rb".freeze, "spec/mongo/error/crypt_error_spec.rb".freeze, "spec/mongo/error/unsupported_option_spec.rb".freeze, "spec/mongo/error/parser_spec.rb".freeze, "spec/mongo/error/operation_failure_spec.rb".freeze, "spec/mongo/error/max_bson_size_spec.rb".freeze, "spec/mongo/error/bulk_write_error_spec.rb".freeze, "spec/mongo/error/no_server_available_spec.rb".freeze, "spec/mongo/socket_spec.rb".freeze, "spec/mongo/index/view_spec.rb".freeze, "spec/mongo/event/subscriber_spec.rb".freeze, "spec/mongo/event/publisher_spec.rb".freeze, "spec/mongo/cluster_spec.rb".freeze, "spec/mongo/crypt/auto_encryption_context_spec.rb".freeze, "spec/mongo/crypt/encryption_io_spec.rb".freeze, "spec/mongo/crypt/status_spec.rb".freeze, "spec/mongo/crypt/auto_decryption_context_spec.rb".freeze, "spec/mongo/crypt/auto_encrypter_spec.rb".freeze, "spec/mongo/crypt/binding/status_spec.rb".freeze, "spec/mongo/crypt/binding/mongocrypt_spec.rb".freeze, "spec/mongo/crypt/binding/context_spec.rb".freeze, "spec/mongo/crypt/binding/version_spec.rb".freeze, "spec/mongo/crypt/binding/binary_spec.rb".freeze, 
"spec/mongo/crypt/binding/helpers_spec.rb".freeze, "spec/mongo/crypt/helpers/mongo_crypt_spec_helper.rb".freeze, "spec/mongo/crypt/explicit_decryption_context_spec.rb".freeze, "spec/mongo/crypt/handle_spec.rb".freeze, "spec/mongo/crypt/explicit_encryption_context_spec.rb".freeze, "spec/mongo/crypt/binary_spec.rb".freeze, "spec/mongo/crypt/binding_unloaded_spec.rb".freeze, "spec/mongo/crypt/data_key_context_spec.rb".freeze, "spec/mongo/uri_spec.rb".freeze, "spec/mongo/collection_crud_spec.rb".freeze, "spec/mongo/collection/view/aggregation_spec.rb".freeze, "spec/mongo/collection/view/explainable_spec.rb".freeze, "spec/mongo/collection/view/builder/op_query_spec.rb".freeze, "spec/mongo/collection/view/builder/find_command_spec.rb".freeze, "spec/mongo/collection/view/iterable_spec.rb".freeze, "spec/mongo/collection/view/map_reduce_spec.rb".freeze, "spec/mongo/collection/view/immutable_spec.rb".freeze, "spec/mongo/collection/view/change_stream_resume_spec.rb".freeze, "spec/mongo/collection/view/readable_spec.rb".freeze, "spec/mongo/collection/view/change_stream_spec.rb".freeze, "spec/mongo/collection/view/writable_spec.rb".freeze, "spec/mongo/collection/view_spec.rb".freeze, "spec/mongo/collection_ddl_spec.rb".freeze, "spec/mongo/timeout_spec.rb".freeze, "spec/mongo/srv/monitor_spec.rb".freeze, "spec/mongo/srv/result_spec.rb".freeze, "spec/mongo/cursor/builder/op_get_more_spec.rb".freeze, "spec/mongo/cursor/builder/get_more_command_spec.rb".freeze, "spec/mongo/session_spec.rb".freeze, "spec/mongo/collection_spec.rb".freeze, "spec/mongo/retryable_spec.rb".freeze, "spec/mongo/utils_spec.rb".freeze, "spec/mongo/monitoring/command_log_subscriber_spec.rb".freeze, "spec/mongo/monitoring/event/topology_changed_spec.rb".freeze, "spec/mongo/monitoring/event/command_started_spec.rb".freeze, "spec/mongo/monitoring/event/server_heartbeat_succeeded_spec.rb".freeze, "spec/mongo/monitoring/event/command_succeeded_spec.rb".freeze, 
"spec/mongo/monitoring/event/command_failed_spec.rb".freeze, "spec/mongo/monitoring/event/server_opening_spec.rb".freeze, "spec/mongo/monitoring/event/server_closed_spec.rb".freeze, "spec/mongo/monitoring/event/topology_closed_spec.rb".freeze, "spec/mongo/monitoring/event/server_description_changed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_check_out_started_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_checked_out_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_checked_in_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_closed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_check_out_failed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/pool_closed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_ready_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_created_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/pool_created_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/pool_cleared_spec.rb".freeze, "spec/mongo/monitoring/event/server_heartbeat_started_spec.rb".freeze, "spec/mongo/monitoring/event/server_heartbeat_failed_spec.rb".freeze, "spec/mongo/monitoring/event/topology_opening_spec.rb".freeze, "spec/mongo/monitoring/event/secure_spec.rb".freeze, "spec/mongo/write_concern_spec.rb".freeze, "spec/mongo/server_selector/primary_spec.rb".freeze, "spec/mongo/server_selector/nearest_spec.rb".freeze, "spec/mongo/server_selector/secondary_spec.rb".freeze, "spec/mongo/server_selector/secondary_preferred_spec.rb".freeze, "spec/mongo/server_selector/primary_preferred_spec.rb".freeze, "spec/mongo/uri_option_parsing_spec.rb".freeze, "spec/mongo/address_spec.rb".freeze, "spec/mongo/semaphore_spec.rb".freeze, "spec/mongo/socket/tcp_spec.rb".freeze, "spec/mongo/socket/ssl_spec.rb".freeze, "spec/mongo/socket/unix_spec.rb".freeze, "spec/mongo/database_spec.rb".freeze, "spec/mongo/protocol/compressed_spec.rb".freeze, "spec/mongo/protocol/delete_spec.rb".freeze, 
"spec/mongo/protocol/update_spec.rb".freeze, "spec/mongo/protocol/msg_spec.rb".freeze, "spec/mongo/protocol/kill_cursors_spec.rb".freeze, "spec/mongo/protocol/reply_spec.rb".freeze, "spec/mongo/protocol/get_more_spec.rb".freeze, "spec/mongo/protocol/query_spec.rb".freeze, "spec/mongo/protocol/registry_spec.rb".freeze, "spec/mongo/protocol/insert_spec.rb".freeze, "spec/mongo/session/session_pool_spec.rb".freeze, "spec/mongo/session/server_session_spec.rb".freeze, "spec/mongo/client_encryption_spec.rb".freeze, "spec/mongo/monitoring_spec.rb".freeze, "spec/mongo/bson_spec.rb".freeze, "spec/NOTES.aws-auth.md".freeze, "spec/support/matchers.rb".freeze, "spec/support/aws_utils/inspector.rb".freeze, "spec/support/aws_utils/base.rb".freeze, "spec/support/aws_utils/provisioner.rb".freeze, "spec/support/aws_utils/orchestrator.rb".freeze, "spec/support/spec_setup.rb".freeze, "spec/support/cluster_tools.rb".freeze, "spec/support/json_ext_formatter.rb".freeze, "spec/support/certificates/crl.pem".freeze, "spec/support/certificates/client-x509.key".freeze, "spec/support/certificates/server-int.crt".freeze, "spec/support/certificates/client-encrypted.key".freeze, "spec/support/certificates/client-int.crt".freeze, "spec/support/certificates/client-second-level.key".freeze, "spec/support/certificates/server-second-level-bundle.pem".freeze, "spec/support/certificates/ca.crt".freeze, "spec/support/certificates/client-second-level-bundle.pem".freeze, "spec/support/certificates/multi-ca.crt".freeze, "spec/support/certificates/server.pem".freeze, "spec/support/certificates/client.crt".freeze, "spec/support/certificates/client-x509.crt".freeze, "spec/support/certificates/atlas-ocsp.crt".freeze, "spec/support/certificates/server-second-level.key".freeze, "spec/support/certificates/client-x509.pem".freeze, "spec/support/certificates/python-ca.crt".freeze, "spec/support/certificates/client-second-level.pem".freeze, "spec/support/certificates/client-second-level.crt".freeze, 
"spec/support/certificates/client.pem".freeze, "spec/support/certificates/server-second-level.pem".freeze, "spec/support/certificates/server-second-level.crt".freeze, "spec/support/certificates/atlas-ocsp-ca.crt".freeze, "spec/support/certificates/crl_client_revoked.pem".freeze, "spec/support/certificates/client.key".freeze, "spec/support/certificates/README.md".freeze, "spec/support/authorization.rb".freeze, "spec/support/utils.rb".freeze, "spec/support/local_resource_registry.rb".freeze, "spec/support/background_thread_registry.rb".freeze, "spec/support/dns.rb".freeze, "spec/support/common_shortcuts.rb".freeze, "spec/support/primary_socket.rb".freeze, "spec/support/shared/session.rb".freeze, "spec/support/shared/app_metadata.rb".freeze, "spec/support/shared/auth_context.rb".freeze, "spec/support/shared/server_selector.rb".freeze, "spec/support/shared/protocol.rb".freeze, "spec/support/shared/scram_conversation.rb".freeze, "spec/support/crypt.rb".freeze, "spec/support/crypt/data_keys/key_document_local.json".freeze, "spec/support/crypt/data_keys/key_document_aws.json".freeze, "spec/support/crypt/limits/limits-schema.json".freeze, "spec/support/crypt/limits/limits-key.json".freeze, "spec/support/crypt/limits/limits-doc.json".freeze, "spec/support/crypt/corpus/corpus.json".freeze, "spec/support/crypt/corpus/corpus-key-aws.json".freeze, "spec/support/crypt/corpus/corpus-schema.json".freeze, "spec/support/crypt/corpus/corpus-key-local.json".freeze, "spec/support/crypt/corpus/corpus_encrypted.json".freeze, "spec/support/crypt/external/external-schema.json".freeze, "spec/support/crypt/external/external-key.json".freeze, "spec/support/crypt/schema_maps/schema_map_aws.json".freeze, "spec/support/crypt/schema_maps/schema_map_local.json".freeze, "spec/support/crypt/schema_maps/schema_map_local_key_alt_names.json".freeze, "spec/support/crypt/schema_maps/schema_map_aws_key_alt_names.json".freeze, "spec/support/spec_config.rb".freeze, 
"spec/support/sdam_formatter_integration.rb".freeze, "spec/support/client_registry_macros.rb".freeze, "spec/support/aws_utils.rb".freeze, "spec/support/client_registry.rb".freeze, "spec/support/using_hash.rb".freeze, "spec/support/session_registry.rb".freeze, "spec/support/constraints.rb".freeze, "spec/support/ocsp".freeze, "spec/support/monitoring_ext.rb".freeze, "spec/support/keyword_struct.rb".freeze, "spec/atlas/operations_spec.rb".freeze, "spec/atlas/atlas_connectivity_spec.rb".freeze, "spec/spec_helper.rb".freeze, "spec/README.md".freeze] + s.test_files = ["spec/kerberos/kerberos_spec.rb".freeze, "spec/stress/push_monitor_close_spec.rb".freeze, "spec/stress/fork_reconnect_stress_spec.rb".freeze, "spec/stress/connection_pool_timing_spec.rb".freeze, "spec/stress/cleanup_spec.rb".freeze, "spec/stress/connection_pool_stress_spec.rb".freeze, "spec/README.aws-auth.md".freeze, "spec/USERS.md".freeze, "spec/runners/connection_string.rb".freeze, "spec/runners/sdam/verifier.rb".freeze, "spec/runners/transactions/operation.rb".freeze, "spec/runners/transactions/test.rb".freeze, "spec/runners/transactions/spec.rb".freeze, "spec/runners/change_streams/test.rb".freeze, "spec/runners/change_streams/outcome.rb".freeze, "spec/runners/change_streams/spec.rb".freeze, "spec/runners/unified/support_operations.rb".freeze, "spec/runners/unified/test_group.rb".freeze, "spec/runners/unified/assertions.rb".freeze, "spec/runners/unified/crud_operations.rb".freeze, "spec/runners/unified/event_subscriber.rb".freeze, "spec/runners/unified/test.rb".freeze, "spec/runners/unified/exceptions.rb".freeze, "spec/runners/unified/error.rb".freeze, "spec/runners/unified/grid_fs_operations.rb".freeze, "spec/runners/unified/ddl_operations.rb".freeze, "spec/runners/unified/change_stream_operations.rb".freeze, "spec/runners/unified/entity_map.rb".freeze, "spec/runners/crud/operation.rb".freeze, "spec/runners/crud/test_base.rb".freeze, "spec/runners/crud/test.rb".freeze, 
"spec/runners/crud/outcome.rb".freeze, "spec/runners/crud/context.rb".freeze, "spec/runners/crud/requirement.rb".freeze, "spec/runners/crud/verifier.rb".freeze, "spec/runners/crud/spec.rb".freeze, "spec/runners/cmap.rb".freeze, "spec/runners/command_monitoring.rb".freeze, "spec/runners/gridfs.rb".freeze, "spec/runners/transactions.rb".freeze, "spec/runners/read_write_concern_document.rb".freeze, "spec/runners/server_selection_rtt.rb".freeze, "spec/runners/unified.rb".freeze, "spec/runners/auth.rb".freeze, "spec/runners/cmap/verifier.rb".freeze, "spec/runners/sdam.rb".freeze, "spec/runners/server_selection.rb".freeze, "spec/runners/crud.rb".freeze, "spec/integration/ocsp_connectivity_spec.rb".freeze, "spec/integration/bulk_write_error_message_spec.rb".freeze, "spec/integration/query_cache_spec.rb".freeze, "spec/integration/sdam_events_spec.rb".freeze, "spec/integration/operation_failure_message_spec.rb".freeze, "spec/integration/ssl_uri_options_spec.rb".freeze, "spec/integration/connection_pool_populator_spec.rb".freeze, "spec/integration/aws_credentials_retriever_spec.rb".freeze, "spec/integration/server_selector_spec.rb".freeze, "spec/integration/check_clean_slate_spec.rb".freeze, "spec/integration/client_authentication_options_spec.rb".freeze, "spec/integration/size_limit_spec.rb".freeze, "spec/integration/crud_spec.rb".freeze, "spec/integration/heartbeat_events_spec.rb".freeze, "spec/integration/client_spec.rb".freeze, "spec/integration/server_selection_spec.rb".freeze, "spec/integration/step_down_spec.rb".freeze, "spec/integration/retryable_writes_errors_spec.rb".freeze, "spec/integration/bulk_write_spec.rb".freeze, "spec/integration/transactions_api_examples_spec.rb".freeze, "spec/integration/change_stream_examples_spec.rb".freeze, "spec/integration/map_reduce_spec.rb".freeze, "spec/integration/snappy_compression_spec.rb".freeze, "spec/integration/sdam_prose_spec.rb".freeze, "spec/integration/server_description_spec.rb".freeze, 
"spec/integration/client_connectivity_spec.rb".freeze, "spec/integration/versioned_api_examples_spec.rb".freeze, "spec/integration/auth_spec.rb".freeze, "spec/integration/command_monitoring_spec.rb".freeze, "spec/integration/server_spec.rb".freeze, "spec/integration/shell_examples_spec.rb".freeze, "spec/integration/retryable_writes/retryable_writes_40_and_newer_spec.rb".freeze, "spec/integration/retryable_writes/retryable_writes_36_and_older_spec.rb".freeze, "spec/integration/retryable_writes/shared/does_not_support_retries.rb".freeze, "spec/integration/retryable_writes/shared/performs_modern_retries.rb".freeze, "spec/integration/retryable_writes/shared/supports_retries.rb".freeze, "spec/integration/retryable_writes/shared/supports_legacy_retries.rb".freeze, "spec/integration/retryable_writes/shared/performs_legacy_retries.rb".freeze, "spec/integration/retryable_writes/shared/supports_modern_retries.rb".freeze, "spec/integration/retryable_writes/shared/only_supports_legacy_retries.rb".freeze, "spec/integration/retryable_writes/shared/adds_diagnostics.rb".freeze, "spec/integration/retryable_writes/shared/performs_no_retries.rb".freeze, "spec/integration/reconnect_spec.rb".freeze, "spec/integration/secondary_reads_spec.rb".freeze, "spec/integration/x509_auth_spec.rb".freeze, "spec/integration/grid_fs_bucket_spec.rb".freeze, "spec/integration/truncated_utf8_spec.rb".freeze, "spec/integration/bulk_insert_spec.rb".freeze, "spec/integration/transaction_pinning_spec.rb".freeze, "spec/integration/cursor_reaping_spec.rb".freeze, "spec/integration/zlib_compression_spec.rb".freeze, "spec/integration/ocsp_verifier_cache_spec.rb".freeze, "spec/integration/client_construction_spec.rb".freeze, "spec/integration/awaited_ismaster_spec.rb".freeze, "spec/integration/client_construction_aws_auth_spec.rb".freeze, "spec/integration/get_more_spec.rb".freeze, "spec/integration/srv_monitoring_spec.rb".freeze, "spec/integration/error_detection_spec.rb".freeze, 
"spec/integration/collection_indexes_prose_spec.rb".freeze, "spec/integration/transactions_examples_spec.rb".freeze, "spec/integration/connection_spec.rb".freeze, "spec/integration/read_preference_spec.rb".freeze, "spec/integration/change_stream_spec.rb".freeze, "spec/integration/connect_single_rs_name_spec.rb".freeze, "spec/integration/bson_symbol_spec.rb".freeze, "spec/integration/srv_spec.rb".freeze, "spec/integration/mongos_pinning_spec.rb".freeze, "spec/integration/aws_auth_request_spec.rb".freeze, "spec/integration/cursor_pinning_spec.rb".freeze, "spec/integration/ocsp_verifier_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_reconnect_spec.rb".freeze, "spec/integration/client_side_encryption/external_key_vault_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_mongocryptd_spawn_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_spec.rb".freeze, "spec/integration/client_side_encryption/bypass_mongocryptd_spawn_spec.rb".freeze, "spec/integration/client_side_encryption/client_close_spec.rb".freeze, "spec/integration/client_side_encryption/explicit_encryption_spec.rb".freeze, "spec/integration/client_side_encryption/custom_endpoint_spec.rb".freeze, "spec/integration/client_side_encryption/views_spec.rb".freeze, "spec/integration/client_side_encryption/bson_size_limit_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_command_monitoring_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_bulk_writes_spec.rb".freeze, "spec/integration/client_side_encryption/corpus_spec.rb".freeze, "spec/integration/client_side_encryption/data_key_spec.rb".freeze, "spec/integration/client_side_encryption/auto_encryption_old_wire_version_spec.rb".freeze, "spec/integration/query_cache_transactions_spec.rb".freeze, "spec/integration/command_spec.rb".freeze, "spec/integration/mmapv1_spec.rb".freeze, "spec/integration/time_zone_querying_spec.rb".freeze, 
"spec/integration/fork_reconnect_spec.rb".freeze, "spec/integration/docs_examples_spec.rb".freeze, "spec/integration/read_concern_spec.rb".freeze, "spec/integration/retryable_errors_spec.rb".freeze, "spec/integration/sdam_error_handling_spec.rb".freeze, "spec/integration/client_update_spec.rb".freeze, "spec/integration/operation_failure_code_spec.rb".freeze, "spec/integration/zstd_compression_spec.rb".freeze, "spec/integration/server_monitor_spec.rb".freeze, "spec/solo/clean_exit_spec.rb".freeze, "spec/shared/bin/s3-copy".freeze, "spec/shared/bin/s3-upload".freeze, "spec/shared/bin/get-mongodb-download-url".freeze, "spec/shared/lib/mrss/server_version_registry.rb".freeze, "spec/shared/lib/mrss/event_subscriber.rb".freeze, "spec/shared/lib/mrss/child_process_helper.rb".freeze, "spec/shared/lib/mrss/utils.rb".freeze, "spec/shared/lib/mrss/spec_organizer.rb".freeze, "spec/shared/lib/mrss/docker_runner.rb".freeze, "spec/shared/lib/mrss/constraints.rb".freeze, "spec/shared/lib/mrss/lite_constraints.rb".freeze, "spec/shared/lib/mrss/cluster_config.rb".freeze, "spec/shared/LICENSE".freeze, "spec/shared/shlib/set_env.sh".freeze, "spec/shared/shlib/server.sh".freeze, "spec/shared/shlib/distro.sh".freeze, "spec/shared/share/Dockerfile.erb".freeze, "spec/shared/share/haproxy-1.conf".freeze, "spec/shared/share/haproxy-2.conf".freeze, "spec/lite_spec_helper.rb".freeze, "spec/spec_tests/cmap_spec.rb".freeze, "spec/spec_tests/connection_string_spec.rb".freeze, "spec/spec_tests/seed_list_discovery_spec.rb".freeze, "spec/spec_tests/sdam_monitoring_spec.rb".freeze, "spec/spec_tests/sdam_integration_spec.rb".freeze, "spec/spec_tests/crud_spec.rb".freeze, "spec/spec_tests/server_selection_spec.rb".freeze, "spec/spec_tests/retryable_writes_spec.rb".freeze, "spec/spec_tests/load_balancers_spec.rb".freeze, "spec/spec_tests/auth_spec.rb".freeze, "spec/spec_tests/command_monitoring_spec.rb".freeze, "spec/spec_tests/read_write_concern_document_spec.rb".freeze, 
"spec/spec_tests/transactions_api_spec.rb".freeze, "spec/spec_tests/uri_options_spec.rb".freeze, "spec/spec_tests/crud_unified_spec.rb".freeze, "spec/spec_tests/versioned_api_spec.rb".freeze, "spec/spec_tests/gridfs_spec.rb".freeze, "spec/spec_tests/client_side_encryption_spec.rb".freeze, "spec/spec_tests/retryable_reads_spec.rb".freeze, "spec/spec_tests/max_staleness_spec.rb".freeze, "spec/spec_tests/sdam_spec.rb".freeze, "spec/spec_tests/read_write_concern_connection_string_spec.rb".freeze, "spec/spec_tests/command_monitoring_unified_spec.rb".freeze, "spec/spec_tests/read_write_concern_operaton_spec.rb".freeze, "spec/spec_tests/server_selection_rtt_spec.rb".freeze, "spec/spec_tests/change_streams_unified_spec.rb".freeze, "spec/spec_tests/collection_management_spec.rb".freeze, "spec/spec_tests/change_streams_spec.rb".freeze, "spec/spec_tests/transactions_unified_spec.rb".freeze, "spec/spec_tests/transactions_spec.rb".freeze, "spec/spec_tests/data/sdam_monitoring/load_balancer.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_me_mismatch.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone_repeated.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_second_seed_removal.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_primary_address_change.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_primary_and_secondary.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_other_change.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_other_chain.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_primary.yml".freeze, "spec/spec_tests/data/sdam_monitoring/discovered_standalone.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_removal.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone_suppress_equal_description_changes.yml".freeze, 
"spec/spec_tests/data/sdam_monitoring/required_replica_set.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_primary_removal.yml".freeze, "spec/spec_tests/data/sdam_monitoring/replica_set_with_no_primary.yml".freeze, "spec/spec_tests/data/sdam_monitoring/standalone_to_rs_with_me_mismatch.yml".freeze, "spec/spec_tests/data/server_selection_rtt/first_value.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_3.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_2.yml".freeze, "spec/spec_tests/data/server_selection_rtt/first_value_zero.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_1.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_5.yml".freeze, "spec/spec_tests/data/server_selection_rtt/value_test_4.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-network.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-NotWritablePrimary.yml".freeze, 
"spec/spec_tests/data/sdam/errors/pre-42-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/prefer-error-code.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-network-timeout-error.yml".freeze, 
"spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-timeout.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-NotPrimaryNoSecondaryOk.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-NotPrimaryOrSecondary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-InterruptedDueToReplStateChange.yml".freeze, 
"spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/pre-42-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-beforeHandshakeCompletes-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-topologyVersion-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/error_handling_handshake.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-network.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-ShutdownInProgress.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-network-error.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-NotWritablePrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-LegacyNotPrimary.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-proccessId-changed-InterruptedAtShutdown.yml".freeze, 
"spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-InterruptedAtShutdown.yml".freeze, "spec/spec_tests/data/sdam/errors/stale-generation-afterHandshakeCompletes-timeout.yml".freeze, "spec/spec_tests/data/sdam/errors/write_errors_ignored.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-missing-PrimarySteppedDown.yml".freeze, "spec/spec_tests/data/sdam/errors/post-42-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/errors/non-stale-topologyVersion-greater-InterruptedDueToReplStateChange.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_rsarbiter.yml".freeze, "spec/spec_tests/data/sdam/single/too_old.yml".freeze, "spec/spec_tests/data/sdam/single/too_old_then_upgraded.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_mongos.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_rsprimary.yml".freeze, "spec/spec_tests/data/sdam/single/standalone_using_legacy_hello.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_replicaset.yml".freeze, "spec/spec_tests/data/sdam/single/discover_unavailable_seed.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_unavailable_seed.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_standalone.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_rssecondary.yml".freeze, "spec/spec_tests/data/sdam/single/ruby_primary_different_address.yml".freeze, "spec/spec_tests/data/sdam/single/too_new.yml".freeze, "spec/spec_tests/data/sdam/single/direct_connection_external_ip.yml".freeze, "spec/spec_tests/data/sdam/single/compatible.yml".freeze, "spec/spec_tests/data/sdam/single/standalone_removed.yml".freeze, "spec/spec_tests/data/sdam/single/not_ok_response.yml".freeze, 
"spec/spec_tests/data/sdam/single/ruby_primary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/single/discover_standalone.yml".freeze, "spec/spec_tests/data/sdam/single/ls_timeout_standalone.yml".freeze, "spec/spec_tests/data/sdam/load-balanced/discover_load_balancer.yml".freeze, "spec/spec_tests/data/sdam/rs/ls_timeout.yml".freeze, "spec/spec_tests/data/sdam/rs/incompatible_other.yml".freeze, "spec/spec_tests/data/sdam/rs/hosts_differ_from_seeds.yml".freeze, "spec/spec_tests/data/sdam/rs/ruby_secondary_wrong_set_name_with_primary_second.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_wrong_set_name_with_primary.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_to_no_primary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/rs/non_rs_member.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_secondary_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_primary.yml".freeze, "spec/spec_tests/data/sdam/rs/too_old.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_ghost_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/topology_version_greater.yml".freeze, "spec/spec_tests/data/sdam/rs/normalize_case_me.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_changes_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_secondary.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_rsother_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_mismatched_me_not_removed.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_rsother.yml".freeze, "spec/spec_tests/data/sdam/rs/response_from_removed.yml".freeze, "spec/spec_tests/data/sdam/rs/null_election_id.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_disconnect_electionid.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_disconnect.yml".freeze, "spec/spec_tests/data/sdam/rs/new_primary_new_electionid.yml".freeze, "spec/spec_tests/data/sdam/rs/normalize_case.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_reports_new_member.yml".freeze, 
"spec/spec_tests/data/sdam/rs/new_primary.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_arbiters_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/topology_version_less.yml".freeze, "spec/spec_tests/data/sdam/rs/sec_not_auth.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/rs/discovery.yml".freeze, "spec/spec_tests/data/sdam/rs/use_setversion_without_electionid.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_hint_from_secondary_with_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_primary_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_becomes_standalone.yml".freeze, "spec/spec_tests/data/sdam/rs/setversion_without_electionid.yml".freeze, "spec/spec_tests/data/sdam/rs/unexpected_mongos.yml".freeze, "spec/spec_tests/data/sdam/rs/member_reconfig.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_disconnect_setversion.yml".freeze, "spec/spec_tests/data/sdam/rs/new_primary_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/repeated.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_arbiters.yml".freeze, "spec/spec_tests/data/sdam/rs/topology_version_equal.yml".freeze, "spec/spec_tests/data/sdam/rs/incompatible_arbiter.yml".freeze, "spec/spec_tests/data/sdam/rs/compatible_unknown.yml".freeze, "spec/spec_tests/data/sdam/rs/too_new.yml".freeze, "spec/spec_tests/data/sdam/rs/ruby_primary_address_change.yml".freeze, "spec/spec_tests/data/sdam/rs/incompatible_ghost.yml".freeze, "spec/spec_tests/data/sdam/rs/wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/new_primary_new_setversion.yml".freeze, "spec/spec_tests/data/sdam/rs/member_standalone.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_becomes_mongos.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_hidden_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_mismatched_me.yml".freeze, 
"spec/spec_tests/data/sdam/rs/compatible.yml".freeze, "spec/spec_tests/data/sdam/rs/secondary_ignore_ok_0.yml".freeze, "spec/spec_tests/data/sdam/rs/replicaset_rsnp.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_wrong_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_ghost.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_passives_replicaset.yml".freeze, "spec/spec_tests/data/sdam/rs/stepdown_change_set_name.yml".freeze, "spec/spec_tests/data/sdam/rs/primary_becomes_ghost.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_hidden.yml".freeze, "spec/spec_tests/data/sdam/rs/discover_passives.yml".freeze, "spec/spec_tests/data/sdam/rs/equal_electionids.yml".freeze, "spec/spec_tests/data/sdam/sharded/non_mongos_removed.yml".freeze, "spec/spec_tests/data/sdam/sharded/multiple_mongoses.yml".freeze, "spec/spec_tests/data/sdam/sharded/too_old.yml".freeze, "spec/spec_tests/data/sdam/sharded/mongos_disconnect.yml".freeze, "spec/spec_tests/data/sdam/sharded/discover_single_mongos.yml".freeze, "spec/spec_tests/data/sdam/sharded/ls_timeout_mongos.yml".freeze, "spec/spec_tests/data/sdam/sharded/ruby_primary_different_address.yml".freeze, "spec/spec_tests/data/sdam/sharded/too_new.yml".freeze, "spec/spec_tests/data/sdam/sharded/compatible.yml".freeze, "spec/spec_tests/data/sdam/sharded/ruby_primary_mismatched_me.yml".freeze, "spec/spec_tests/data/sdam/sharded/normalize_uri_case.yml".freeze, "spec/spec_tests/data/auth/connection-string.yml".freeze, "spec/spec_tests/data/transactions/errors.yml".freeze, "spec/spec_tests/data/transactions/insert.yml".freeze, "spec/spec_tests/data/transactions/error-labels.yml".freeze, "spec/spec_tests/data/transactions/delete.yml".freeze, "spec/spec_tests/data/transactions/mongos-pin-auto.yml".freeze, "spec/spec_tests/data/transactions/write-concern.yml".freeze, "spec/spec_tests/data/transactions/run-command.yml".freeze, "spec/spec_tests/data/transactions/pin-mongos.yml".freeze, "spec/spec_tests/data/transactions/update.yml".freeze, 
"spec/spec_tests/data/transactions/errors-client.yml".freeze, "spec/spec_tests/data/transactions/mongos-recovery-token.yml".freeze, "spec/spec_tests/data/transactions/findOneAndDelete.yml".freeze, "spec/spec_tests/data/transactions/transaction-options-repl.yml".freeze, "spec/spec_tests/data/transactions/findOneAndReplace.yml".freeze, "spec/spec_tests/data/transactions/bulk.yml".freeze, "spec/spec_tests/data/transactions/create-index.yml".freeze, "spec/spec_tests/data/transactions/transaction-options.yml".freeze, "spec/spec_tests/data/transactions/isolation.yml".freeze, "spec/spec_tests/data/transactions/retryable-commit.yml".freeze, "spec/spec_tests/data/transactions/read-pref.yml".freeze, "spec/spec_tests/data/transactions/reads.yml".freeze, "spec/spec_tests/data/transactions/retryable-abort-errorLabels.yml".freeze, "spec/spec_tests/data/transactions/abort.yml".freeze, "spec/spec_tests/data/transactions/count.yml".freeze, "spec/spec_tests/data/transactions/retryable-commit-errorLabels.yml".freeze, "spec/spec_tests/data/transactions/commit.yml".freeze, "spec/spec_tests/data/transactions/causal-consistency.yml".freeze, "spec/spec_tests/data/transactions/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/transactions/retryable-abort.yml".freeze, "spec/spec_tests/data/transactions/retryable-writes.yml".freeze, "spec/spec_tests/data/transactions/create-collection.yml".freeze, "spec/spec_tests/data/transactions/read-concern.yml".freeze, "spec/spec_tests/data/connection_string/valid-host_identifiers.yml".freeze, "spec/spec_tests/data/connection_string/valid-warnings.yml".freeze, "spec/spec_tests/data/connection_string/invalid-uris.yml".freeze, "spec/spec_tests/data/connection_string/valid-db-with-dotted-name.yml".freeze, "spec/spec_tests/data/connection_string/valid-unix_socket-absolute.yml".freeze, "spec/spec_tests/data/connection_string/valid-options.yml".freeze, "spec/spec_tests/data/connection_string/valid-auth.yml".freeze, 
"spec/spec_tests/data/connection_string/valid-unix_socket-relative.yml".freeze, "spec/spec_tests/data/change_streams/change-streams-errors.yml".freeze, "spec/spec_tests/data/change_streams/change-streams-resume-errorLabels.yml".freeze, "spec/spec_tests/data/change_streams/change-streams.yml".freeze, "spec/spec_tests/data/change_streams/change-streams-resume-allowlist.yml".freeze, "spec/spec_tests/data/load_balancers/server-selection.yml".freeze, "spec/spec_tests/data/load_balancers/non-lb-connection-establishment.yml".freeze, "spec/spec_tests/data/load_balancers/event-monitoring.yml".freeze, "spec/spec_tests/data/load_balancers/lb-connection-establishment.yml".freeze, "spec/spec_tests/data/unified/valid-fail/operation-failure.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-retryable-writes.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-gridfs.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-transactions-mongos-pin-auto.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-transactions-convenient-api.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-transactions.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-sessions.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-change-streams.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-retryable-reads.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-command-monitoring.yml".freeze, "spec/spec_tests/data/unified/valid-pass/poc-crud.yml".freeze, "spec/spec_tests/data/retryable_writes/insertOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/insertMany-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndDelete-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/insertOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteOne.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndUpdate-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/bulkWrite.yml".freeze, 
"spec/spec_tests/data/retryable_writes/insertOne.yml".freeze, "spec/spec_tests/data/retryable_writes/updateOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/updateOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndDelete-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndDelete.yml".freeze, "spec/spec_tests/data/retryable_writes/updateOne.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndReplace.yml".freeze, "spec/spec_tests/data/retryable_writes/replaceOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/insertMany-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndUpdate-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/replaceOne.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/bulkWrite-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/insertMany.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndReplace-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_writes/replaceOne-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/retryable_writes/bulkWrite-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/findOneAndReplace-errorLabels.yml".freeze, "spec/spec_tests/data/retryable_writes/updateMany.yml".freeze, "spec/spec_tests/data/retryable_writes/deleteMany.yml".freeze, "spec/spec_tests/data/max_staleness/Unknown/SmallMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Nearest.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/ZeroMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/SecondaryPreferred.yml".freeze, 
"spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/MaxStalenessWithModePrimary.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/LastUpdateTime.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Nearest_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Nearest2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/MaxStalenessTooSmall.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Secondary_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/Secondary_tags2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/LongHeartbeat2.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/DefaultNoMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/LongHeartbeat.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetWithPrimary/PrimaryPreferred_incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Nearest.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/ZeroMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/NoKnownServers.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/PrimaryPreferred_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/LastUpdateTime.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Nearest2.yml".freeze, 
"spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/MaxStalenessTooSmall.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/SecondaryPreferred_tags.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/Secondary.yml".freeze, "spec/spec_tests/data/max_staleness/ReplicaSetNoPrimary/DefaultNoMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/Single/SmallMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/Single/Incompatible.yml".freeze, "spec/spec_tests/data/max_staleness/Sharded/SmallMaxStaleness.yml".freeze, "spec/spec_tests/data/max_staleness/Sharded/Incompatible.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabases.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionNames-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.coll.watch-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/findOne-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/aggregate.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-serverErrors-pre4.9.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseObjects-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexes.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabases-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.coll.watch.yml".freeze, "spec/spec_tests/data/retryable_reads/count-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-client.watch.yml".freeze, "spec/spec_tests/data/retryable_reads/countDocuments.yml".freeze, "spec/spec_tests/data/retryable_reads/gridfs-downloadByName.yml".freeze, "spec/spec_tests/data/retryable_reads/gridfs-downloadByName-serverErrors.yml".freeze, 
"spec/spec_tests/data/retryable_reads/countDocuments-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexes-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.watch-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexNames.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseNames.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseNames-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/gridfs-download-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/distinct-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionNames.yml".freeze, "spec/spec_tests/data/retryable_reads/findOne.yml".freeze, "spec/spec_tests/data/retryable_reads/mapReduce.yml".freeze, "spec/spec_tests/data/retryable_reads/distinct.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-db.watch.yml".freeze, "spec/spec_tests/data/retryable_reads/aggregate-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listDatabaseObjects.yml".freeze, "spec/spec_tests/data/retryable_reads/count.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-pre4.9.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionObjects.yml".freeze, "spec/spec_tests/data/retryable_reads/changeStreams-client.watch-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollectionObjects-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/aggregate-merge.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollections.yml".freeze, "spec/spec_tests/data/retryable_reads/gridfs-download.yml".freeze, "spec/spec_tests/data/retryable_reads/listIndexNames-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/find.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-serverErrors-4.9.yml".freeze, "spec/spec_tests/data/retryable_reads/estimatedDocumentCount-4.9.yml".freeze, 
"spec/spec_tests/data/retryable_reads/find-serverErrors.yml".freeze, "spec/spec_tests/data/retryable_reads/listCollections-serverErrors.yml".freeze, "spec/spec_tests/data/transactions_unified/mongos-unpin.yml".freeze, "spec/spec_tests/data/versioned_api/crud-api-version-1-strict.yml".freeze, "spec/spec_tests/data/versioned_api/test-commands-strict-mode.yml".freeze, "spec/spec_tests/data/versioned_api/transaction-handling.yml".freeze, "spec/spec_tests/data/versioned_api/runcommand-helper-no-api-version-declared.yml".freeze, "spec/spec_tests/data/versioned_api/crud-api-version-1.yml".freeze, "spec/spec_tests/data/versioned_api/test-commands-deprecation-errors.yml".freeze, "spec/spec_tests/data/server_selection/Unknown/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Nearest.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Nearest_multiple.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Secondary_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_tags.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Secondary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Nearest_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetWithPrimary/read/Primary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Nearest.yml".freeze, 
"spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Nearest_multiple.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags2.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PossiblePrimaryNearest.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Secondary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Nearest_non_matching.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/PossiblePrimary.yml".freeze, "spec/spec_tests/data/server_selection/ReplicaSetNoPrimary/read/Primary.yml".freeze, "spec/spec_tests/data/server_selection/Single/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/Nearest.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/SecondaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/PrimaryPreferred.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/Secondary.yml".freeze, "spec/spec_tests/data/server_selection/Sharded/read/Primary.yml".freeze, "spec/spec_tests/data/command_monitoring_unified/redacted-commands.yml".freeze, "spec/spec_tests/data/uri_options/auth-options.yml".freeze, "spec/spec_tests/data/uri_options/read-preference-options.yml".freeze, 
"spec/spec_tests/data/uri_options/ruby-connection-options.yml".freeze, "spec/spec_tests/data/uri_options/tls-options.yml".freeze, "spec/spec_tests/data/uri_options/connection-options.yml".freeze, "spec/spec_tests/data/uri_options/ruby-auth-options.yml".freeze, "spec/spec_tests/data/uri_options/connection-pool-options.yml".freeze, "spec/spec_tests/data/uri_options/compression-options.yml".freeze, "spec/spec_tests/data/uri_options/concern-options.yml".freeze, "spec/spec_tests/data/crud/read/aggregate-out.yml".freeze, "spec/spec_tests/data/crud/read/aggregate.yml".freeze, "spec/spec_tests/data/crud/read/aggregate-collation.yml".freeze, "spec/spec_tests/data/crud/read/distinct-collation.yml".freeze, "spec/spec_tests/data/crud/read/distinct.yml".freeze, "spec/spec_tests/data/crud/read/count.yml".freeze, "spec/spec_tests/data/crud/read/count-collation.yml".freeze, "spec/spec_tests/data/crud/read/find.yml".freeze, "spec/spec_tests/data/crud/read/find-collation.yml".freeze, "spec/spec_tests/data/crud/read/count-empty.yml".freeze, "spec/spec_tests/data/crud/write/updateOne-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndReplace-upsert.yml".freeze, "spec/spec_tests/data/crud/write/deleteOne.yml".freeze, "spec/spec_tests/data/crud/write/deleteOne-collation.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndUpdate-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/bulkWrite-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/bulkWrite.yml".freeze, "spec/spec_tests/data/crud/write/updateMany-arrayFilters.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndUpdate-collation.yml".freeze, "spec/spec_tests/data/crud/write/insertOne.yml".freeze, "spec/spec_tests/data/crud/write/replaceOne-collation.yml".freeze, "spec/spec_tests/data/crud/write/updateOne-collation.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndDelete.yml".freeze, "spec/spec_tests/data/crud/write/updateOne.yml".freeze, 
"spec/spec_tests/data/crud/write/findOneAndReplace.yml".freeze, "spec/spec_tests/data/crud/write/updateMany-collation.yml".freeze, "spec/spec_tests/data/crud/write/replaceOne.yml".freeze, "spec/spec_tests/data/crud/write/updateOne-pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/bulkWrite-collation.yml".freeze, "spec/spec_tests/data/crud/write/insertMany.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndReplace-collation.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndDelete-collation.yml".freeze, "spec/spec_tests/data/crud/write/replaceOne-upsert.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/crud/write/updateMany-pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/updateMany.yml".freeze, "spec/spec_tests/data/crud/write/deleteMany-collation.yml".freeze, "spec/spec_tests/data/crud/write/replaceOne-pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/findOneAndReplace-upsert_pre_2.6.yml".freeze, "spec/spec_tests/data/crud/write/deleteMany.yml".freeze, "spec/spec_tests/data/collection_management/timeseries-collection.yml".freeze, "spec/spec_tests/data/sdam_integration/insert-network-error.yml".freeze, "spec/spec_tests/data/sdam_integration/find-network-error.yml".freeze, "spec/spec_tests/data/sdam_integration/hello-command-error.yml".freeze, "spec/spec_tests/data/sdam_integration/connectTimeoutMS.yml".freeze, "spec/spec_tests/data/sdam_integration/find-shutdown-error.yml".freeze, "spec/spec_tests/data/sdam_integration/insert-shutdown-error.yml".freeze, "spec/spec_tests/data/sdam_integration/hello-timeout.yml".freeze, "spec/spec_tests/data/sdam_integration/cancel-server-check.yml".freeze, "spec/spec_tests/data/sdam_integration/hello-network-error.yml".freeze, "spec/spec_tests/data/sdam_integration/rediscover-quickly-after-step-down.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-port.yml".freeze, 
"spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-with-overridden-ssl-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-not-allowed-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/one-result-default-port.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/misformatted-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-admin-database.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch2.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch4.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/two-results-nonstandard-port.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-with-unallowed-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/one-txt-record-multiple-strings.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/two-results-default-port.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/longer-parent-in-return.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/encoded-userinfo-and-db.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch5.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch1.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/returned-parent-wrong.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/direct-connection-false.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-two-hosts.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/direct-connection-true.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/not-enough-parts.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/loadBalanced-false-txt.yml".freeze, 
"spec/spec_tests/data/seed_list_discovery/replica-set/no-results.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/one-txt-record.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/returned-parent-too-short.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/uri-with-auth.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/two-txt-records.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/parent-part-mismatch3.yml".freeze, "spec/spec_tests/data/seed_list_discovery/replica-set/txt-record-with-overridden-uri-option.yml".freeze, "spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-true-txt.yml".freeze, "spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-true-multiple-hosts.yml".freeze, "spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-replicaSet-errors.yml".freeze, "spec/spec_tests/data/seed_list_discovery/load-balanced/loadBalanced-directConnection.yml".freeze, "spec/spec_tests/data/transactions_api/callback-commits.yml".freeze, "spec/spec_tests/data/transactions_api/callback-aborts.yml".freeze, "spec/spec_tests/data/transactions_api/commit-retry.yml".freeze, "spec/spec_tests/data/transactions_api/commit-transienttransactionerror.yml".freeze, "spec/spec_tests/data/transactions_api/transaction-options.yml".freeze, "spec/spec_tests/data/transactions_api/callback-retry.yml".freeze, "spec/spec_tests/data/transactions_api/commit-writeconcernerror.yml".freeze, "spec/spec_tests/data/transactions_api/commit-transienttransactionerror-4.2.yml".freeze, "spec/spec_tests/data/transactions_api/commit.yml".freeze, "spec/spec_tests/data/client_side_encryption/bypassAutoEncryption.yml".freeze, "spec/spec_tests/data/client_side_encryption/aggregate.yml".freeze, "spec/spec_tests/data/client_side_encryption/malformedCiphertext.yml".freeze, "spec/spec_tests/data/client_side_encryption/insert.yml".freeze, 
"spec/spec_tests/data/client_side_encryption/types.yml".freeze, "spec/spec_tests/data/client_side_encryption/getMore.yml".freeze, "spec/spec_tests/data/client_side_encryption/delete.yml".freeze, "spec/spec_tests/data/client_side_encryption/maxWireVersion.yml".freeze, "spec/spec_tests/data/client_side_encryption/localSchema.yml".freeze, "spec/spec_tests/data/client_side_encryption/countDocuments.yml".freeze, "spec/spec_tests/data/client_side_encryption/badQueries.yml".freeze, "spec/spec_tests/data/client_side_encryption/bypassedCommand.yml".freeze, "spec/spec_tests/data/client_side_encryption/findOneAndDelete.yml".freeze, "spec/spec_tests/data/client_side_encryption/updateOne.yml".freeze, "spec/spec_tests/data/client_side_encryption/missingKey.yml".freeze, "spec/spec_tests/data/client_side_encryption/findOneAndReplace.yml".freeze, "spec/spec_tests/data/client_side_encryption/bulk.yml".freeze, "spec/spec_tests/data/client_side_encryption/keyAltName.yml".freeze, "spec/spec_tests/data/client_side_encryption/explain.yml".freeze, "spec/spec_tests/data/client_side_encryption/replaceOne.yml".freeze, "spec/spec_tests/data/client_side_encryption/distinct.yml".freeze, "spec/spec_tests/data/client_side_encryption/count.yml".freeze, "spec/spec_tests/data/client_side_encryption/badSchema.yml".freeze, "spec/spec_tests/data/client_side_encryption/localKMS.yml".freeze, "spec/spec_tests/data/client_side_encryption/findOneAndUpdate.yml".freeze, "spec/spec_tests/data/client_side_encryption/unsupportedCommand.yml".freeze, "spec/spec_tests/data/client_side_encryption/updateMany.yml".freeze, "spec/spec_tests/data/client_side_encryption/find.yml".freeze, "spec/spec_tests/data/client_side_encryption/basic.yml".freeze, "spec/spec_tests/data/crud_unified/updateWithPipelines.yml".freeze, "spec/spec_tests/data/crud_unified/estimatedDocumentCount.yml".freeze, "spec/spec_tests/data/command_monitoring/deleteOne.yml".freeze, 
"spec/spec_tests/data/command_monitoring/unacknowledgedBulkWrite.yml".freeze, "spec/spec_tests/data/command_monitoring/bulkWrite.yml".freeze, "spec/spec_tests/data/command_monitoring/insertOne.yml".freeze, "spec/spec_tests/data/command_monitoring/updateOne.yml".freeze, "spec/spec_tests/data/command_monitoring/command.yml".freeze, "spec/spec_tests/data/command_monitoring/insertMany.yml".freeze, "spec/spec_tests/data/command_monitoring/updateMany.yml".freeze, "spec/spec_tests/data/command_monitoring/find.yml".freeze, "spec/spec_tests/data/command_monitoring/deleteMany.yml".freeze, "spec/spec_tests/data/cmap/pool-create-with-options.yml".freeze, "spec/spec_tests/data/cmap/pool-close.yml".freeze, "spec/spec_tests/data/cmap/pool-checkin-make-available.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-no-stale.yml".freeze, "spec/spec_tests/data/cmap/wait-queue-timeout.yml".freeze, "spec/spec_tests/data/cmap/pool-create.yml".freeze, "spec/spec_tests/data/cmap/pool-checkin.yml".freeze, "spec/spec_tests/data/cmap/pool-create-min-size.yml".freeze, "spec/spec_tests/data/cmap/wait-queue-fairness.yml".freeze, "spec/spec_tests/data/cmap/connection-must-have-id.yml".freeze, "spec/spec_tests/data/cmap/pool-checkin-destroy-closed.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-multiple.yml".freeze, "spec/spec_tests/data/cmap/pool-checkin-destroy-stale.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-error-closed.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-connection.yml".freeze, "spec/spec_tests/data/cmap/connection-must-order-ids.yml".freeze, "spec/spec_tests/data/cmap/pool-checkout-no-idle.yml".freeze, "spec/spec_tests/data/cmap/pool-create-max-size.yml".freeze, "spec/spec_tests/data/cmap/pool-close-destroy-conns.yml".freeze, "spec/spec_tests/data/change_streams_unified/change-streams.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-findOneAndDelete-hint-clientError.yml".freeze, 
"spec/spec_tests/data/crud_v2/findOneAndUpdate-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-replaceOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteMany-hint.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-update-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-delete-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/aggregate-out-readConcern.yml".freeze, "spec/spec_tests/data/crud_v2/updateMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateOne-hint.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-deleteOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-arrayFilters.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-bulkWrite-delete-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateMany-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/find-allowdiskuse-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-updateMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-findOneAndUpdate-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndReplace-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteOne-hint.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-update-hint.yml".freeze, "spec/spec_tests/data/crud_v2/deleteMany-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-delete-hint.yml".freeze, "spec/spec_tests/data/crud_v2/db-aggregate.yml".freeze, "spec/spec_tests/data/crud_v2/updateOne-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/replaceOne-hint.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndDelete-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateWithPipelines.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-update-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/bulkWrite-delete-hint-clientError.yml".freeze, 
"spec/spec_tests/data/crud_v2/find-allowdiskuse.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-updateOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndUpdate-hint.yml".freeze, "spec/spec_tests/data/crud_v2/find-allowdiskuse-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-deleteMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndDelete-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/aggregate-merge.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndReplace-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/updateOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-findOneAndReplace-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteOne-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/unacknowledged-bulkWrite-update-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndReplace-hint.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndDelete-hint.yml".freeze, "spec/spec_tests/data/crud_v2/findOneAndUpdate-hint-serverError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteOne-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/deleteMany-hint-clientError.yml".freeze, "spec/spec_tests/data/crud_v2/updateMany-hint.yml".freeze, "spec/spec_tests/data/gridfs/download.yml".freeze, "spec/spec_tests/data/gridfs/delete.yml".freeze, "spec/spec_tests/data/gridfs/upload.yml".freeze, "spec/spec_tests/data/gridfs/download_by_name.yml".freeze, "spec/spec_tests/data/read_write_concern/connection-string/write-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/connection-string/read-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/document/write-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/document/read-concern.yml".freeze, "spec/spec_tests/data/read_write_concern/operation/default-write-concern-2.6.yml".freeze, 
"spec/spec_tests/data/read_write_concern/operation/default-write-concern-4.2.yml".freeze, "spec/spec_tests/data/read_write_concern/operation/default-write-concern-3.2.yml".freeze, "spec/spec_tests/data/read_write_concern/operation/default-write-concern-3.4.yml".freeze, "spec/spec_tests/unified_spec.rb".freeze, "spec/mongo/query_cache_spec.rb".freeze, "spec/mongo/cluster/cursor_reaper_spec.rb".freeze, "spec/mongo/cluster/topology_spec.rb".freeze, "spec/mongo/cluster/socket_reaper_spec.rb".freeze, "spec/mongo/cluster/topology/unknown_spec.rb".freeze, "spec/mongo/cluster/topology/sharded_spec.rb".freeze, "spec/mongo/cluster/topology/single_spec.rb".freeze, "spec/mongo/cluster/topology/replica_set_spec.rb".freeze, "spec/mongo/cluster/periodic_executor_spec.rb".freeze, "spec/mongo/logger_spec.rb".freeze, "spec/mongo/cursor_spec.rb".freeze, "spec/mongo/id_spec.rb".freeze, "spec/mongo/server_selector_spec.rb".freeze, "spec/mongo/auth/stringprep_spec.rb".freeze, "spec/mongo/auth/ldap/conversation_spec.rb".freeze, "spec/mongo/auth/stringprep/profiles/sasl_spec.rb".freeze, "spec/mongo/auth/gssapi/conversation_spec.rb".freeze, "spec/mongo/auth/x509_spec.rb".freeze, "spec/mongo/auth/cr_spec.rb".freeze, "spec/mongo/auth/ldap_spec.rb".freeze, "spec/mongo/auth/scram_negotiation_spec.rb".freeze, "spec/mongo/auth/scram256/conversation_spec.rb".freeze, "spec/mongo/auth/user_spec.rb".freeze, "spec/mongo/auth/invalid_mechanism_spec.rb".freeze, "spec/mongo/auth/scram/conversation_spec.rb".freeze, "spec/mongo/auth/scram_spec.rb".freeze, "spec/mongo/auth/x509/conversation_spec.rb".freeze, "spec/mongo/auth/user/view_spec.rb".freeze, "spec/mongo/auth/aws/request_region_spec.rb".freeze, "spec/mongo/auth/aws/request_spec.rb".freeze, "spec/mongo/distinguishing_semaphore_spec.rb".freeze, "spec/mongo/grid/stream/write_spec.rb".freeze, "spec/mongo/grid/stream/read_spec.rb".freeze, "spec/mongo/grid/file_spec.rb".freeze, "spec/mongo/grid/fs_bucket_spec.rb".freeze, 
"spec/mongo/grid/stream_spec.rb".freeze, "spec/mongo/grid/file/info_spec.rb".freeze, "spec/mongo/grid/file/chunk_spec.rb".freeze, "spec/mongo/tls_context_hooks_spec.rb".freeze, "spec/mongo/options/redacted_spec.rb".freeze, "spec/mongo/client_spec.rb".freeze, "spec/mongo/lint_spec.rb".freeze, "spec/mongo/bulk_write_spec.rb".freeze, "spec/mongo/bulk_write/unordered_combiner_spec.rb".freeze, "spec/mongo/bulk_write/result_spec.rb".freeze, "spec/mongo/bulk_write/ordered_combiner_spec.rb".freeze, "spec/mongo/write_concern/unacknowledged_spec.rb".freeze, "spec/mongo/write_concern/acknowledged_spec.rb".freeze, "spec/mongo/dbref_spec.rb".freeze, "spec/mongo/auth_spec.rb".freeze, "spec/mongo/server_spec.rb".freeze, "spec/mongo/caching_cursor_spec.rb".freeze, "spec/mongo/uri/srv_protocol_spec.rb".freeze, "spec/mongo/cluster_time_spec.rb".freeze, "spec/mongo/session_transaction_spec.rb".freeze, "spec/mongo/query_cache_middleware_spec.rb".freeze, "spec/mongo/client_construction_spec.rb".freeze, "spec/mongo/address/ipv6_spec.rb".freeze, "spec/mongo/address/validator_spec.rb".freeze, "spec/mongo/address/ipv4_spec.rb".freeze, "spec/mongo/address/unix_spec.rb".freeze, "spec/mongo/operation/indexes_spec.rb".freeze, "spec/mongo/operation/limited_spec.rb".freeze, "spec/mongo/operation/aggregate_spec.rb".freeze, "spec/mongo/operation/insert/bulk_spec.rb".freeze, "spec/mongo/operation/insert/op_msg_spec.rb".freeze, "spec/mongo/operation/insert/command_spec.rb".freeze, "spec/mongo/operation/delete_spec.rb".freeze, "spec/mongo/operation/update_spec.rb".freeze, "spec/mongo/operation/specifiable_spec.rb".freeze, "spec/mongo/operation/map_reduce_spec.rb".freeze, "spec/mongo/operation/result_spec.rb".freeze, "spec/mongo/operation/kill_cursors_spec.rb".freeze, "spec/mongo/operation/aggregate/result_spec.rb".freeze, "spec/mongo/operation/collections_info_spec.rb".freeze, "spec/mongo/operation/create_index_spec.rb".freeze, "spec/mongo/operation/update/bulk_spec.rb".freeze, 
"spec/mongo/operation/update/op_msg_spec.rb".freeze, "spec/mongo/operation/update/command_spec.rb".freeze, "spec/mongo/operation/find/builder/modifiers_spec.rb".freeze, "spec/mongo/operation/find/builder/flags_spec.rb".freeze, "spec/mongo/operation/find/legacy_spec.rb".freeze, "spec/mongo/operation/read_preference_op_msg_spec.rb".freeze, "spec/mongo/operation/get_more_spec.rb".freeze, "spec/mongo/operation/create_user_spec.rb".freeze, "spec/mongo/operation/remove_user_spec.rb".freeze, "spec/mongo/operation/command_spec.rb".freeze, "spec/mongo/operation/update_user_spec.rb".freeze, "spec/mongo/operation/delete/bulk_spec.rb".freeze, "spec/mongo/operation/delete/op_msg_spec.rb".freeze, "spec/mongo/operation/delete/command_spec.rb".freeze, "spec/mongo/operation/read_preference_legacy_spec.rb".freeze, "spec/mongo/operation/drop_index_spec.rb".freeze, "spec/mongo/operation/insert_spec.rb".freeze, "spec/mongo/server/monitor_spec.rb".freeze, "spec/mongo/server/connection_auth_spec.rb".freeze, "spec/mongo/server/app_metadata_spec.rb".freeze, "spec/mongo/server/monitor/app_metadata_spec.rb".freeze, "spec/mongo/server/monitor/connection_spec.rb".freeze, "spec/mongo/server/description_spec.rb".freeze, "spec/mongo/server/round_trip_time_averager_spec.rb".freeze, "spec/mongo/server/description_query_methods_spec.rb".freeze, "spec/mongo/server/description/features_spec.rb".freeze, "spec/mongo/server/connection_common_spec.rb".freeze, "spec/mongo/server/connection_pool/populator_spec.rb".freeze, "spec/mongo/server/connection_spec.rb".freeze, "spec/mongo/server/connection_pool_spec.rb".freeze, "spec/mongo/error/notable_spec.rb".freeze, "spec/mongo/error/operation_failure_heavy_spec.rb".freeze, "spec/mongo/error/crypt_error_spec.rb".freeze, "spec/mongo/error/unsupported_option_spec.rb".freeze, "spec/mongo/error/parser_spec.rb".freeze, "spec/mongo/error/operation_failure_spec.rb".freeze, "spec/mongo/error/max_bson_size_spec.rb".freeze, 
"spec/mongo/error/bulk_write_error_spec.rb".freeze, "spec/mongo/error/no_server_available_spec.rb".freeze, "spec/mongo/socket_spec.rb".freeze, "spec/mongo/index/view_spec.rb".freeze, "spec/mongo/event/subscriber_spec.rb".freeze, "spec/mongo/event/publisher_spec.rb".freeze, "spec/mongo/cluster_spec.rb".freeze, "spec/mongo/crypt/auto_encryption_context_spec.rb".freeze, "spec/mongo/crypt/encryption_io_spec.rb".freeze, "spec/mongo/crypt/status_spec.rb".freeze, "spec/mongo/crypt/auto_decryption_context_spec.rb".freeze, "spec/mongo/crypt/auto_encrypter_spec.rb".freeze, "spec/mongo/crypt/binding/status_spec.rb".freeze, "spec/mongo/crypt/binding/mongocrypt_spec.rb".freeze, "spec/mongo/crypt/binding/context_spec.rb".freeze, "spec/mongo/crypt/binding/version_spec.rb".freeze, "spec/mongo/crypt/binding/binary_spec.rb".freeze, "spec/mongo/crypt/binding/helpers_spec.rb".freeze, "spec/mongo/crypt/helpers/mongo_crypt_spec_helper.rb".freeze, "spec/mongo/crypt/explicit_decryption_context_spec.rb".freeze, "spec/mongo/crypt/handle_spec.rb".freeze, "spec/mongo/crypt/explicit_encryption_context_spec.rb".freeze, "spec/mongo/crypt/binary_spec.rb".freeze, "spec/mongo/crypt/binding_unloaded_spec.rb".freeze, "spec/mongo/crypt/data_key_context_spec.rb".freeze, "spec/mongo/uri_spec.rb".freeze, "spec/mongo/collection_crud_spec.rb".freeze, "spec/mongo/collection/view/aggregation_spec.rb".freeze, "spec/mongo/collection/view/explainable_spec.rb".freeze, "spec/mongo/collection/view/builder/op_query_spec.rb".freeze, "spec/mongo/collection/view/builder/find_command_spec.rb".freeze, "spec/mongo/collection/view/iterable_spec.rb".freeze, "spec/mongo/collection/view/map_reduce_spec.rb".freeze, "spec/mongo/collection/view/immutable_spec.rb".freeze, "spec/mongo/collection/view/change_stream_resume_spec.rb".freeze, "spec/mongo/collection/view/readable_spec.rb".freeze, "spec/mongo/collection/view/change_stream_spec.rb".freeze, "spec/mongo/collection/view/writable_spec.rb".freeze, 
"spec/mongo/collection/view_spec.rb".freeze, "spec/mongo/collection_ddl_spec.rb".freeze, "spec/mongo/timeout_spec.rb".freeze, "spec/mongo/srv/monitor_spec.rb".freeze, "spec/mongo/srv/result_spec.rb".freeze, "spec/mongo/cursor/builder/op_get_more_spec.rb".freeze, "spec/mongo/cursor/builder/get_more_command_spec.rb".freeze, "spec/mongo/session_spec.rb".freeze, "spec/mongo/collection_spec.rb".freeze, "spec/mongo/retryable_spec.rb".freeze, "spec/mongo/utils_spec.rb".freeze, "spec/mongo/monitoring/command_log_subscriber_spec.rb".freeze, "spec/mongo/monitoring/event/topology_changed_spec.rb".freeze, "spec/mongo/monitoring/event/command_started_spec.rb".freeze, "spec/mongo/monitoring/event/server_heartbeat_succeeded_spec.rb".freeze, "spec/mongo/monitoring/event/command_succeeded_spec.rb".freeze, "spec/mongo/monitoring/event/command_failed_spec.rb".freeze, "spec/mongo/monitoring/event/server_opening_spec.rb".freeze, "spec/mongo/monitoring/event/server_closed_spec.rb".freeze, "spec/mongo/monitoring/event/topology_closed_spec.rb".freeze, "spec/mongo/monitoring/event/server_description_changed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_check_out_started_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_checked_out_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_checked_in_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_closed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_check_out_failed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/pool_closed_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_ready_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/connection_created_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/pool_created_spec.rb".freeze, "spec/mongo/monitoring/event/cmap/pool_cleared_spec.rb".freeze, "spec/mongo/monitoring/event/server_heartbeat_started_spec.rb".freeze, "spec/mongo/monitoring/event/server_heartbeat_failed_spec.rb".freeze, 
"spec/mongo/monitoring/event/topology_opening_spec.rb".freeze, "spec/mongo/monitoring/event/secure_spec.rb".freeze, "spec/mongo/write_concern_spec.rb".freeze, "spec/mongo/server_selector/primary_spec.rb".freeze, "spec/mongo/server_selector/nearest_spec.rb".freeze, "spec/mongo/server_selector/secondary_spec.rb".freeze, "spec/mongo/server_selector/secondary_preferred_spec.rb".freeze, "spec/mongo/server_selector/primary_preferred_spec.rb".freeze, "spec/mongo/uri_option_parsing_spec.rb".freeze, "spec/mongo/address_spec.rb".freeze, "spec/mongo/semaphore_spec.rb".freeze, "spec/mongo/socket/tcp_spec.rb".freeze, "spec/mongo/socket/ssl_spec.rb".freeze, "spec/mongo/socket/unix_spec.rb".freeze, "spec/mongo/database_spec.rb".freeze, "spec/mongo/protocol/compressed_spec.rb".freeze, "spec/mongo/protocol/delete_spec.rb".freeze, "spec/mongo/protocol/update_spec.rb".freeze, "spec/mongo/protocol/msg_spec.rb".freeze, "spec/mongo/protocol/kill_cursors_spec.rb".freeze, "spec/mongo/protocol/reply_spec.rb".freeze, "spec/mongo/protocol/get_more_spec.rb".freeze, "spec/mongo/protocol/query_spec.rb".freeze, "spec/mongo/protocol/registry_spec.rb".freeze, "spec/mongo/protocol/insert_spec.rb".freeze, "spec/mongo/session/session_pool_spec.rb".freeze, "spec/mongo/session/server_session_spec.rb".freeze, "spec/mongo/client_encryption_spec.rb".freeze, "spec/mongo/monitoring_spec.rb".freeze, "spec/mongo/bson_spec.rb".freeze, "spec/NOTES.aws-auth.md".freeze, "spec/support/matchers.rb".freeze, "spec/support/aws_utils/inspector.rb".freeze, "spec/support/aws_utils/base.rb".freeze, "spec/support/aws_utils/provisioner.rb".freeze, "spec/support/aws_utils/orchestrator.rb".freeze, "spec/support/spec_setup.rb".freeze, "spec/support/cluster_tools.rb".freeze, "spec/support/json_ext_formatter.rb".freeze, "spec/support/certificates/crl.pem".freeze, "spec/support/certificates/client-x509.key".freeze, "spec/support/certificates/server-int.crt".freeze, "spec/support/certificates/client-encrypted.key".freeze, 
"spec/support/certificates/client-int.crt".freeze, "spec/support/certificates/client-second-level.key".freeze, "spec/support/certificates/server-second-level-bundle.pem".freeze, "spec/support/certificates/ca.crt".freeze, "spec/support/certificates/client-second-level-bundle.pem".freeze, "spec/support/certificates/multi-ca.crt".freeze, "spec/support/certificates/server.pem".freeze, "spec/support/certificates/client.crt".freeze, "spec/support/certificates/client-x509.crt".freeze, "spec/support/certificates/atlas-ocsp.crt".freeze, "spec/support/certificates/server-second-level.key".freeze, "spec/support/certificates/client-x509.pem".freeze, "spec/support/certificates/python-ca.crt".freeze, "spec/support/certificates/client-second-level.pem".freeze, "spec/support/certificates/client-second-level.crt".freeze, "spec/support/certificates/client.pem".freeze, "spec/support/certificates/server-second-level.pem".freeze, "spec/support/certificates/server-second-level.crt".freeze, "spec/support/certificates/atlas-ocsp-ca.crt".freeze, "spec/support/certificates/crl_client_revoked.pem".freeze, "spec/support/certificates/client.key".freeze, "spec/support/certificates/README.md".freeze, "spec/support/authorization.rb".freeze, "spec/support/utils.rb".freeze, "spec/support/local_resource_registry.rb".freeze, "spec/support/background_thread_registry.rb".freeze, "spec/support/dns.rb".freeze, "spec/support/common_shortcuts.rb".freeze, "spec/support/primary_socket.rb".freeze, "spec/support/shared/session.rb".freeze, "spec/support/shared/app_metadata.rb".freeze, "spec/support/shared/auth_context.rb".freeze, "spec/support/shared/server_selector.rb".freeze, "spec/support/shared/protocol.rb".freeze, "spec/support/shared/scram_conversation.rb".freeze, "spec/support/crypt.rb".freeze, "spec/support/crypt/data_keys/key_document_local.json".freeze, "spec/support/crypt/data_keys/key_document_aws.json".freeze, "spec/support/crypt/limits/limits-schema.json".freeze, 
"spec/support/crypt/limits/limits-key.json".freeze, "spec/support/crypt/limits/limits-doc.json".freeze, "spec/support/crypt/corpus/corpus.json".freeze, "spec/support/crypt/corpus/corpus-key-aws.json".freeze, "spec/support/crypt/corpus/corpus-schema.json".freeze, "spec/support/crypt/corpus/corpus-key-local.json".freeze, "spec/support/crypt/corpus/corpus_encrypted.json".freeze, "spec/support/crypt/external/external-schema.json".freeze, "spec/support/crypt/external/external-key.json".freeze, "spec/support/crypt/schema_maps/schema_map_aws.json".freeze, "spec/support/crypt/schema_maps/schema_map_local.json".freeze, "spec/support/crypt/schema_maps/schema_map_local_key_alt_names.json".freeze, "spec/support/crypt/schema_maps/schema_map_aws_key_alt_names.json".freeze, "spec/support/spec_config.rb".freeze, "spec/support/sdam_formatter_integration.rb".freeze, "spec/support/client_registry_macros.rb".freeze, "spec/support/aws_utils.rb".freeze, "spec/support/client_registry.rb".freeze, "spec/support/using_hash.rb".freeze, "spec/support/session_registry.rb".freeze, "spec/support/constraints.rb".freeze, "spec/support/monitoring_ext.rb".freeze, "spec/support/keyword_struct.rb".freeze, "spec/atlas/operations_spec.rb".freeze, "spec/atlas/atlas_connectivity_spec.rb".freeze, "spec/spec_helper.rb".freeze, "spec/README.md".freeze] diff --git a/devel/rubygem-pedump/Makefile b/devel/rubygem-pedump/Makefile index d5378e3cbb7..6ed772f23e1 100644 --- a/devel/rubygem-pedump/Makefile +++ b/devel/rubygem-pedump/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pedump -PORTVERSION= 0.6.4 +PORTVERSION= 0.6.5 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-pedump/distinfo b/devel/rubygem-pedump/distinfo index 7ffc91082bc..696a272d493 100644 --- a/devel/rubygem-pedump/distinfo +++ b/devel/rubygem-pedump/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971190 -SHA256 (rubygem/pedump-0.6.4.gem) = dfccba77e30b3f489c15e680eed685a351afe89312a5c20cbbd7aa776e6da79d -SIZE 
(rubygem/pedump-0.6.4.gem) = 508416 +TIMESTAMP = 1647264828 +SHA256 (rubygem/pedump-0.6.5.gem) = cd7e47dc4ccf5d4485b8ef98700e5b044e15479bd58bba15ad91967e1c68085f +SIZE (rubygem/pedump-0.6.5.gem) = 508416 diff --git a/devel/rubygem-que-scheduler/Makefile b/devel/rubygem-que-scheduler/Makefile new file mode 100644 index 00000000000..aeac78e7e5b --- /dev/null +++ b/devel/rubygem-que-scheduler/Makefile @@ -0,0 +1,23 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= que-scheduler +PORTVERSION= 4.2.2 +CATEGORIES= devel rubygems +MASTER_SITES= RG + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Lightweight cron scheduler for the Que async job worker + +LICENSE= MIT + +RUN_DEPENDS= rubygem-activesupport70>=5.0:devel/rubygem-activesupport70 \ + rubygem-fugit>=1.1.8<2:devel/rubygem-fugit \ + rubygem-hashie>=3<6:devel/rubygem-hashie \ + rubygem-que>=0.12<2.0.0:devel/rubygem-que + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-que-scheduler/distinfo b/devel/rubygem-que-scheduler/distinfo new file mode 100644 index 00000000000..f7dae39d283 --- /dev/null +++ b/devel/rubygem-que-scheduler/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264418 +SHA256 (rubygem/que-scheduler-4.2.2.gem) = 769b04c6da344fc1a103e2e04372719b91b2cadf26b120b049f5353ae1f0135c +SIZE (rubygem/que-scheduler-4.2.2.gem) = 20992 diff --git a/devel/rubygem-que-scheduler/pkg-descr b/devel/rubygem-que-scheduler/pkg-descr new file mode 100644 index 00000000000..8310f92e5d1 --- /dev/null +++ b/devel/rubygem-que-scheduler/pkg-descr @@ -0,0 +1,6 @@ +que-scheduler is an extension to Que that adds support for scheduling items +using a cron style configuration file. It works by running as a que job itself, +determining what needs to be run, enqueueing those jobs, then enqueueing itself +to check again later. 
+ +WWW: https://github.com/hlascelles/que-scheduler diff --git a/devel/rubygem-que/Makefile b/devel/rubygem-que/Makefile index e3d9bfcee51..252b31e6ee8 100644 --- a/devel/rubygem-que/Makefile +++ b/devel/rubygem-que/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= que -PORTVERSION= 1.0.0 +PORTVERSION= 1.3.1 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -11,11 +11,14 @@ COMMENT= Job queue that uses PostgreSQL advisory locks for speed and reliability LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE.txt -USES= gem +USES= gem shebangfix USE_RUBY= yes NO_ARCH= yes PLIST_FILES= bin/que +SHEBANG_FILES= auto/* \ + scripts/* + .include diff --git a/devel/rubygem-que/distinfo b/devel/rubygem-que/distinfo index d0477d654d4..7935f124a04 100644 --- a/devel/rubygem-que/distinfo +++ b/devel/rubygem-que/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971192 -SHA256 (rubygem/que-1.0.0.gem) = 7f8b99028fe9e82a76467d6c87e6e5709f835090b57f52f4566c47cd88e7264b -SIZE (rubygem/que-1.0.0.gem) = 61440 +TIMESTAMP = 1647264830 +SHA256 (rubygem/que-1.3.1.gem) = e4875ef77d283228878ed37f9218d8c83c480df5ade5e69178a8b02ad1e42b31 +SIZE (rubygem/que-1.3.1.gem) = 66560 diff --git a/devel/rubygem-rails-i18n-rails70/Makefile b/devel/rubygem-rails-i18n-rails70/Makefile index 17ae66fe631..ab32068d6b0 100644 --- a/devel/rubygem-rails-i18n-rails70/Makefile +++ b/devel/rubygem-rails-i18n-rails70/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= rails-i18n -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.3 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= -rails70 diff --git a/devel/rubygem-rails-i18n-rails70/distinfo b/devel/rubygem-rails-i18n-rails70/distinfo index 03de9df0eb3..1c2a2847d8f 100644 --- a/devel/rubygem-rails-i18n-rails70/distinfo +++ b/devel/rubygem-rails-i18n-rails70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058806 -SHA256 (rubygem/rails-i18n-7.0.2.gem) = 8e0bca094d670af1296106ed5bfcae36b3a8dd44c1eb0e4b06c482b0d64db397 -SIZE (rubygem/rails-i18n-7.0.2.gem) = 113664 
+TIMESTAMP = 1647264832 +SHA256 (rubygem/rails-i18n-7.0.3.gem) = e3158e98c5332d129fd5131f171ac575eb30dbb8919b21595382b08850cf2bd3 +SIZE (rubygem/rails-i18n-7.0.3.gem) = 114176 diff --git a/devel/rubygem-ransack/Makefile b/devel/rubygem-ransack/Makefile index b7c11f6e22b..8dd80dc1845 100644 --- a/devel/rubygem-ransack/Makefile +++ b/devel/rubygem-ransack/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= ransack -PORTVERSION= 2.5.0 +PORTVERSION= 2.6.0 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -11,8 +11,8 @@ COMMENT= Successor to the MetaSearch gem LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= rubygem-activerecord52>=5.2.4:databases/rubygem-activerecord52 \ - rubygem-activesupport52>=5.2.4:devel/rubygem-activesupport52 \ +RUN_DEPENDS= rubygem-activerecord60>=6.0.4:databases/rubygem-activerecord60 \ + rubygem-activesupport60>=6.0.4:devel/rubygem-activesupport60 \ rubygem-i18n>=0:devel/rubygem-i18n USES= gem diff --git a/devel/rubygem-ransack/distinfo b/devel/rubygem-ransack/distinfo index bf3a0a51c3d..4ee9b23b308 100644 --- a/devel/rubygem-ransack/distinfo +++ b/devel/rubygem-ransack/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641046706 -SHA256 (rubygem/ransack-2.5.0.gem) = c285c6659791d47de70caf14b482e693cf73d987de469b15f7df909244651d61 -SIZE (rubygem/ransack-2.5.0.gem) = 265216 +TIMESTAMP = 1647264834 +SHA256 (rubygem/ransack-2.6.0.gem) = db765e0da29ac2eb8f811a06cf46dbb79ebd99f248f1cbba0f6f73bb8656c6a1 +SIZE (rubygem/ransack-2.6.0.gem) = 264704 diff --git a/devel/rubygem-rspec-rails/Makefile b/devel/rubygem-rspec-rails/Makefile index d3651f159bf..f7b4cf06758 100644 --- a/devel/rubygem-rspec-rails/Makefile +++ b/devel/rubygem-rspec-rails/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= rspec-rails -PORTVERSION= 5.1.0 +PORTVERSION= 5.1.1 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-rspec-rails/distinfo b/devel/rubygem-rspec-rails/distinfo index aab8c6cd1a4..31e1c62d644 100644 --- 
a/devel/rubygem-rspec-rails/distinfo +++ b/devel/rubygem-rspec-rails/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643382624 -SHA256 (rubygem/rspec-rails-5.1.0.gem) = 9bbf5a6c09b9454a8098dc554b2bf7264ef68466a531004508043501d7e0803b -SIZE (rubygem/rspec-rails-5.1.0.gem) = 69632 +TIMESTAMP = 1647264836 +SHA256 (rubygem/rspec-rails-5.1.1.gem) = ca214f0570eb5e531740b040e4ac66c41f18b55f84a994cc8f88520193fe454a +SIZE (rubygem/rspec-rails-5.1.1.gem) = 69632 diff --git a/devel/rubygem-rubocop-performance/Makefile b/devel/rubygem-rubocop-performance/Makefile index 8190a4f84fc..d9632c29f93 100644 --- a/devel/rubygem-rubocop-performance/Makefile +++ b/devel/rubygem-rubocop-performance/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= rubocop-performance -PORTVERSION= 1.13.2 +PORTVERSION= 1.13.3 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-rubocop-performance/distinfo b/devel/rubygem-rubocop-performance/distinfo index e3f7dda7f95..a3fc10a7f30 100644 --- a/devel/rubygem-rubocop-performance/distinfo +++ b/devel/rubygem-rubocop-performance/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133985 -SHA256 (rubygem/rubocop-performance-1.13.2.gem) = b780ab7f567b477c767090a3bf85845df08637ebe6fa0255c790809459d80b05 -SIZE (rubygem/rubocop-performance-1.13.2.gem) = 40960 +TIMESTAMP = 1647264838 +SHA256 (rubygem/rubocop-performance-1.13.3.gem) = 9d9ccca13f540d6677b5e97978ad3ee5498f143dd156a463de59bee25b40de9e +SIZE (rubygem/rubocop-performance-1.13.3.gem) = 40960 diff --git a/devel/rubygem-rubocop-rspec/Makefile b/devel/rubygem-rubocop-rspec/Makefile index 2ae07144902..2b1019b582c 100644 --- a/devel/rubygem-rubocop-rspec/Makefile +++ b/devel/rubygem-rubocop-rspec/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= rubocop-rspec -PORTVERSION= 2.8.0 +PORTVERSION= 2.9.0 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-rubocop-rspec/distinfo b/devel/rubygem-rubocop-rspec/distinfo index 63493e88261..5f7e39cfebb 100644 --- 
a/devel/rubygem-rubocop-rspec/distinfo +++ b/devel/rubygem-rubocop-rspec/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133987 -SHA256 (rubygem/rubocop-rspec-2.8.0.gem) = 98cfc44d5ec2339454556ee2125871e87a5dfdb73bc652bcc479e719c731385c -SIZE (rubygem/rubocop-rspec-2.8.0.gem) = 72192 +TIMESTAMP = 1647264840 +SHA256 (rubygem/rubocop-rspec-2.9.0.gem) = c6d91197f6d799f68c8dd0098b0259cc28151302763432f0233b26f26fc22f14 +SIZE (rubygem/rubocop-rspec-2.9.0.gem) = 72704 diff --git a/devel/rubygem-sentry-rails/Makefile b/devel/rubygem-sentry-rails/Makefile new file mode 100644 index 00000000000..136ea0e14cc --- /dev/null +++ b/devel/rubygem-sentry-rails/Makefile @@ -0,0 +1,22 @@ +# Created by: Matthias Fechner + +PORTNAME= sentry-rails +PORTVERSION= 5.1.1 +CATEGORIES= devel rubygems +MASTER_SITES= RG + +MAINTAINER= mfechner@FreeBSD.org +COMMENT= Client interface for the Sentry error logger + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +RUN_DEPENDS= rubygem-railties61>=5.0.0:www/rubygem-railties61 \ + rubygem-sentry-ruby-core>=5.1.1<5.2:devel/rubygem-sentry-ruby-core + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-sentry-rails/distinfo b/devel/rubygem-sentry-rails/distinfo new file mode 100644 index 00000000000..2238b3bb809 --- /dev/null +++ b/devel/rubygem-sentry-rails/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647943826 +SHA256 (rubygem/sentry-rails-5.1.1.gem) = 906ef0a776ddc35884ab8b548856ba81c607e3fdee7c9c9f7c44efccc16a657f +SIZE (rubygem/sentry-rails-5.1.1.gem) = 18944 diff --git a/devel/rubygem-sentry-rails/pkg-descr b/devel/rubygem-sentry-rails/pkg-descr new file mode 100644 index 00000000000..e4b2c29218d --- /dev/null +++ b/devel/rubygem-sentry-rails/pkg-descr @@ -0,0 +1,4 @@ +The official Ruby-language client and integration layer for the Sentry error +reporting API. 
+ +WWW: https://github.com/getsentry/sentry-ruby diff --git a/devel/rubygem-sentry-ruby-core/Makefile b/devel/rubygem-sentry-ruby-core/Makefile new file mode 100644 index 00000000000..d45b1c1fd98 --- /dev/null +++ b/devel/rubygem-sentry-ruby-core/Makefile @@ -0,0 +1,21 @@ +# Created by: Matthias Fechner + +PORTNAME= sentry-ruby-core +PORTVERSION= 5.1.1 +CATEGORIES= devel rubygems +MASTER_SITES= RG + +MAINTAINER= mfechner@FreeBSD.org +COMMENT= Client interface for the Sentry error logger + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +RUN_DEPENDS= rubygem-concurrent-ruby>=0:devel/rubygem-concurrent-ruby + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-sentry-ruby-core/distinfo b/devel/rubygem-sentry-ruby-core/distinfo new file mode 100644 index 00000000000..8039ff48b8f --- /dev/null +++ b/devel/rubygem-sentry-ruby-core/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647943576 +SHA256 (rubygem/sentry-ruby-core-5.1.1.gem) = 152ed891ee78348da448a65237be92990f3e8b9e5b34bb39003eade48cca5d04 +SIZE (rubygem/sentry-ruby-core-5.1.1.gem) = 47616 diff --git a/devel/rubygem-sentry-ruby-core/pkg-descr b/devel/rubygem-sentry-ruby-core/pkg-descr new file mode 100644 index 00000000000..e4b2c29218d --- /dev/null +++ b/devel/rubygem-sentry-ruby-core/pkg-descr @@ -0,0 +1,4 @@ +The official Ruby-language client and integration layer for the Sentry error +reporting API. 
+ +WWW: https://github.com/getsentry/sentry-ruby diff --git a/devel/rubygem-sentry-ruby/Makefile b/devel/rubygem-sentry-ruby/Makefile new file mode 100644 index 00000000000..2c960f71586 --- /dev/null +++ b/devel/rubygem-sentry-ruby/Makefile @@ -0,0 +1,22 @@ +# Created by: Matthias Fechner + +PORTNAME= sentry-ruby +PORTVERSION= 5.1.1 +CATEGORIES= devel rubygems +MASTER_SITES= RG + +MAINTAINER= mfechner@FreeBSD.org +COMMENT= Client interface for the Sentry error logger + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +RUN_DEPENDS= rubygem-concurrent-ruby>=1.0.2<2:devel/rubygem-concurrent-ruby \ + rubygem-sentry-ruby-core>=5.1.1<5.1.2:devel/rubygem-sentry-ruby-core + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-sentry-ruby/distinfo b/devel/rubygem-sentry-ruby/distinfo new file mode 100644 index 00000000000..e8cd5f933c7 --- /dev/null +++ b/devel/rubygem-sentry-ruby/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647943440 +SHA256 (rubygem/sentry-ruby-5.1.1.gem) = 4e49563b72c1c22ffe3a67e5024b856c364766146d01e270423fd494d8fcc125 +SIZE (rubygem/sentry-ruby-5.1.1.gem) = 7168 diff --git a/devel/rubygem-sentry-ruby/pkg-descr b/devel/rubygem-sentry-ruby/pkg-descr new file mode 100644 index 00000000000..e4b2c29218d --- /dev/null +++ b/devel/rubygem-sentry-ruby/pkg-descr @@ -0,0 +1,4 @@ +The official Ruby-language client and integration layer for the Sentry error +reporting API. 
+ +WWW: https://github.com/getsentry/sentry-ruby diff --git a/devel/rubygem-sentry-sidekiq/Makefile b/devel/rubygem-sentry-sidekiq/Makefile new file mode 100644 index 00000000000..96ca26a6062 --- /dev/null +++ b/devel/rubygem-sentry-sidekiq/Makefile @@ -0,0 +1,22 @@ +# Created by: Matthias Fechner + +PORTNAME= sentry-sidekiq +PORTVERSION= 5.1.1 +CATEGORIES= devel rubygems +MASTER_SITES= RG + +MAINTAINER= mfechner@FreeBSD.org +COMMENT= Client interface for the Sentry error logger + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +RUN_DEPENDS= rubygem-sentry-ruby-core>=5.1.1<5.2:devel/rubygem-sentry-ruby-core \ + rubygem-sidekiq>=3.0:devel/rubygem-sidekiq + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-sentry-sidekiq/distinfo b/devel/rubygem-sentry-sidekiq/distinfo new file mode 100644 index 00000000000..39b76f426af --- /dev/null +++ b/devel/rubygem-sentry-sidekiq/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647944028 +SHA256 (rubygem/sentry-sidekiq-5.1.1.gem) = e4c3618577fba37f7a9fc3812013a7868d09e0e0a0970efc605e6e184079d1af +SIZE (rubygem/sentry-sidekiq-5.1.1.gem) = 12288 diff --git a/devel/rubygem-sentry-sidekiq/pkg-descr b/devel/rubygem-sentry-sidekiq/pkg-descr new file mode 100644 index 00000000000..e4b2c29218d --- /dev/null +++ b/devel/rubygem-sentry-sidekiq/pkg-descr @@ -0,0 +1,4 @@ +The official Ruby-language client and integration layer for the Sentry error +reporting API. + +WWW: https://github.com/getsentry/sentry-ruby diff --git a/devel/rubygem-sprockets/Makefile b/devel/rubygem-sprockets/Makefile index 2719b9d9eed..3cd8224437d 100644 --- a/devel/rubygem-sprockets/Makefile +++ b/devel/rubygem-sprockets/Makefile @@ -1,7 +1,7 @@ # Created by: Mikhail T. 
PORTNAME= sprockets -PORTVERSION= 4.0.2 +PORTVERSION= 4.0.3 CATEGORIES= devel rubygems MASTER_SITES= RG diff --git a/devel/rubygem-sprockets/distinfo b/devel/rubygem-sprockets/distinfo index 0c2f824601c..1bc250479f2 100644 --- a/devel/rubygem-sprockets/distinfo +++ b/devel/rubygem-sprockets/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1592325758 -SHA256 (rubygem/sprockets-4.0.2.gem) = 68d44758ae3da4f172c80abeff323100b4c4bb2f0ff6e1a3cb6e6c69e8e26f46 -SIZE (rubygem/sprockets-4.0.2.gem) = 81408 +TIMESTAMP = 1647264842 +SHA256 (rubygem/sprockets-4.0.3.gem) = a07a58b0f6220a5a2ee2fb78498c7c975e6a0b3b4b4df2eba466f9871e6765a6 +SIZE (rubygem/sprockets-4.0.3.gem) = 82432 diff --git a/devel/rubygem-view_component-rails61/Makefile b/devel/rubygem-view_component-rails61/Makefile new file mode 100644 index 00000000000..61f56fdab5b --- /dev/null +++ b/devel/rubygem-view_component-rails61/Makefile @@ -0,0 +1,23 @@ +# Created by: mfechner@FreeBSD.org + +PORTNAME= view_component +PORTVERSION= 2.50.0 +CATEGORIES= devel rubygems +MASTER_SITES= RG +PKGNAMESUFFIX= -rails61 + +MAINTAINER= mfechner@FreeBSD.org +COMMENT= View components for Rails + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +RUN_DEPENDS= rubygem-activesupport61>=5<8:devel/rubygem-activesupport61 \ + rubygem-method_source>=1.0<2:devel/rubygem-method_source + +USES= gem +USE_RUBY= yes + +NO_ARCH= yes + +.include diff --git a/devel/rubygem-view_component-rails61/distinfo b/devel/rubygem-view_component-rails61/distinfo new file mode 100644 index 00000000000..ecb90e502f1 --- /dev/null +++ b/devel/rubygem-view_component-rails61/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647940197 +SHA256 (rubygem/view_component-2.50.0.gem) = c1cdbe62ebe23e4b1fbf6a140782f8865d9e4d972330a514868a4e9c6d5dd0eb +SIZE (rubygem/view_component-2.50.0.gem) = 51200 diff --git a/devel/rubygem-view_component-rails61/pkg-descr b/devel/rubygem-view_component-rails61/pkg-descr new file mode 100644 index 00000000000..b02a75175d7 --- /dev/null +++ 
b/devel/rubygem-view_component-rails61/pkg-descr @@ -0,0 +1,3 @@ +View components for Rails + +WWW: https://github.com/github/view_component diff --git a/devel/rubygem-xdg/Makefile b/devel/rubygem-xdg/Makefile index cc158b53663..93c5394f76e 100644 --- a/devel/rubygem-xdg/Makefile +++ b/devel/rubygem-xdg/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= xdg -PORTVERSION= 6.3.0 +PORTVERSION= 6.3.1 CATEGORIES= devel rubygems MASTER_SITES= RG @@ -9,7 +9,7 @@ MAINTAINER= sunpoet@FreeBSD.org COMMENT= XDG Base Directory Standard Library for Ruby LICENSE= HIPPOCRATIC -LICENSE_NAME= Hippocratic License +LICENSE_NAME= Hippocratic License 2.1.0 LICENSE_FILE= ${WRKSRC}/LICENSE.adoc LICENSE_PERMS= auto-accept dist-mirror pkg-mirror diff --git a/devel/rubygem-xdg/distinfo b/devel/rubygem-xdg/distinfo index 91fba3f8fc2..3558de0647c 100644 --- a/devel/rubygem-xdg/distinfo +++ b/devel/rubygem-xdg/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058832 -SHA256 (rubygem/xdg-6.3.0.gem) = d9ab941fddadfd0488f0e4b70930b23c9a331006fb03191585653a0baedb0840 -SIZE (rubygem/xdg-6.3.0.gem) = 19968 +TIMESTAMP = 1647264844 +SHA256 (rubygem/xdg-6.3.1.gem) = 76bd24c6b923944ef5d82e39b21fc79f5623a7ce713c1d337f6e799a0db1c67f +SIZE (rubygem/xdg-6.3.1.gem) = 16896 diff --git a/devel/rubygem-xdg3/Makefile b/devel/rubygem-xdg3/Makefile index 816c88cedfa..cf9ed1bfb0d 100644 --- a/devel/rubygem-xdg3/Makefile +++ b/devel/rubygem-xdg3/Makefile @@ -2,6 +2,7 @@ PORTNAME= xdg PORTVERSION= 3.1.1 +PORTREVISION= 1 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 3 diff --git a/devel/rubygem-xdg3/files/patch-gemspec b/devel/rubygem-xdg3/files/patch-gemspec new file mode 100644 index 00000000000..78b5f237232 --- /dev/null +++ b/devel/rubygem-xdg3/files/patch-gemspec @@ -0,0 +1,11 @@ +--- xdg.gemspec.orig 2022-03-13 13:35:42 UTC ++++ xdg.gemspec +@@ -16,7 +16,7 @@ Gem::Specification.new do |s| + s.files = ["LICENSE.md".freeze, "README.md".freeze, "lib/xdg.rb".freeze, "lib/xdg/cache.rb".freeze, 
"lib/xdg/config.rb".freeze, "lib/xdg/data.rb".freeze, "lib/xdg/environment.rb".freeze, "lib/xdg/identity.rb".freeze, "lib/xdg/pair.rb".freeze, "lib/xdg/paths/combined.rb".freeze, "lib/xdg/paths/directory.rb".freeze, "lib/xdg/paths/standard.rb".freeze] + s.homepage = "https://github.com/bkuhlmann/xdg".freeze + s.licenses = ["Apache-2.0".freeze] +- s.required_ruby_version = Gem::Requirement.new("~> 2.6".freeze) ++ s.required_ruby_version = Gem::Requirement.new(">= 2.6".freeze) + s.rubygems_version = "3.3.9".freeze + s.summary = "Provides an implementation of the XDG Base Directory Specification.".freeze + diff --git a/devel/rubygem-xdg4/Makefile b/devel/rubygem-xdg4/Makefile index 8c2188bdec0..bd3eeaf8dd0 100644 --- a/devel/rubygem-xdg4/Makefile +++ b/devel/rubygem-xdg4/Makefile @@ -2,6 +2,7 @@ PORTNAME= xdg PORTVERSION= 4.5.0 +PORTREVISION= 1 CATEGORIES= devel rubygems MASTER_SITES= RG PKGNAMESUFFIX= 4 diff --git a/devel/rubygem-xdg4/files/patch-gemspec b/devel/rubygem-xdg4/files/patch-gemspec new file mode 100644 index 00000000000..b5cbc0ccb53 --- /dev/null +++ b/devel/rubygem-xdg4/files/patch-gemspec @@ -0,0 +1,11 @@ +--- xdg.gemspec.orig 2022-03-13 13:39:29 UTC ++++ xdg.gemspec +@@ -16,7 +16,7 @@ Gem::Specification.new do |s| + s.files = ["LICENSE.adoc".freeze, "README.adoc".freeze, "lib/xdg.rb".freeze, "lib/xdg/cache.rb".freeze, "lib/xdg/config.rb".freeze, "lib/xdg/data.rb".freeze, "lib/xdg/environment.rb".freeze, "lib/xdg/identity.rb".freeze, "lib/xdg/pair.rb".freeze, "lib/xdg/paths/combined.rb".freeze, "lib/xdg/paths/directory.rb".freeze, "lib/xdg/paths/home.rb".freeze] + s.homepage = "https://www.alchemists.io/projects/xdg".freeze + s.licenses = ["Apache-2.0".freeze] +- s.required_ruby_version = Gem::Requirement.new("~> 2.7".freeze) ++ s.required_ruby_version = Gem::Requirement.new(">= 2.7".freeze) + s.rubygems_version = "3.3.9".freeze + s.summary = "Provides an implementation of the XDG Base Directory Specification.".freeze + end diff --git 
a/devel/rust-cbindgen/Makefile b/devel/rust-cbindgen/Makefile index 491204e3889..f36dec6c6f6 100644 --- a/devel/rust-cbindgen/Makefile +++ b/devel/rust-cbindgen/Makefile @@ -1,6 +1,5 @@ PORTNAME= cbindgen -DISTVERSION= 0.20.0 -PORTREVISION= 7 +DISTVERSION= 0.21.0 CATEGORIES= devel MASTER_SITES= CRATESIO PKGNAMEPREFIX= rust- @@ -17,16 +16,15 @@ RUN_DEPENDS= cargo:lang/${RUST_DEFAULT} USES= cargo PLIST_FILES= bin/${PORTNAME} -CARGO_CRATES= ansi_term-0.11.0 \ - atty-0.2.14 \ +CARGO_CRATES= atty-0.2.14 \ autocfg-1.0.1 \ bitflags-1.2.1 \ cfg-if-0.1.10 \ - clap-2.33.3 \ + clap-3.1.6 \ cloudabi-0.0.3 \ getrandom-0.1.15 \ hashbrown-0.9.1 \ - heck-0.3.1 \ + heck-0.4.0 \ hermit-abi-0.1.16 \ indexmap-1.6.0 \ itoa-0.4.6 \ @@ -34,6 +32,8 @@ CARGO_CRATES= ansi_term-0.11.0 \ libc-0.2.77 \ lock_api-0.3.4 \ log-0.4.11 \ + memchr-2.4.1 \ + os_str_bytes-6.0.0 \ parking_lot-0.10.2 \ parking_lot_core-0.7.2 \ ppv-lite86-0.2.9 \ @@ -53,18 +53,17 @@ CARGO_CRATES= ansi_term-0.11.0 \ serial_test-0.5.0 \ serial_test_derive-0.5.0 \ smallvec-1.4.2 \ - strsim-0.8.0 \ + strsim-0.10.0 \ syn-1.0.41 \ tempfile-3.1.0 \ - textwrap-0.11.0 \ + termcolor-1.1.3 \ + textwrap-0.15.0 \ toml-0.5.6 \ - unicode-segmentation-1.6.0 \ - unicode-width-0.1.8 \ unicode-xid-0.2.1 \ - vec_map-0.8.2 \ wasi-0.9.0+wasi-snapshot-preview1 \ winapi-0.3.9 \ winapi-i686-pc-windows-gnu-0.4.0 \ + winapi-util-0.1.5 \ winapi-x86_64-pc-windows-gnu-0.4.0 .include diff --git a/devel/rust-cbindgen/distinfo b/devel/rust-cbindgen/distinfo index b48a5e5dfca..ea32fa190e1 100644 --- a/devel/rust-cbindgen/distinfo +++ b/devel/rust-cbindgen/distinfo @@ -1,8 +1,6 @@ -TIMESTAMP = 1627561755 -SHA256 (rust/crates/cbindgen-0.20.0.crate) = 51e3973b165dc0f435831a9e426de67e894de532754ff7a3f307c03ee5dec7dc -SIZE (rust/crates/cbindgen-0.20.0.crate) = 183277 -SHA256 (rust/crates/ansi_term-0.11.0.crate) = ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b -SIZE (rust/crates/ansi_term-0.11.0.crate) = 17087 +TIMESTAMP = 1648230743 +SHA256 
(rust/crates/cbindgen-0.21.0.crate) = 485ede05a56152367a6ec586a7425b475d6c3d3838581ff651d2a6e3730a62ef +SIZE (rust/crates/cbindgen-0.21.0.crate) = 184825 SHA256 (rust/crates/atty-0.2.14.crate) = d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8 SIZE (rust/crates/atty-0.2.14.crate) = 5470 SHA256 (rust/crates/autocfg-1.0.1.crate) = cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a @@ -11,16 +9,16 @@ SHA256 (rust/crates/bitflags-1.2.1.crate) = cf1de2fe8c75bc145a2f577add951f813488 SIZE (rust/crates/bitflags-1.2.1.crate) = 16745 SHA256 (rust/crates/cfg-if-0.1.10.crate) = 4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822 SIZE (rust/crates/cfg-if-0.1.10.crate) = 7933 -SHA256 (rust/crates/clap-2.33.3.crate) = 37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002 -SIZE (rust/crates/clap-2.33.3.crate) = 201925 +SHA256 (rust/crates/clap-3.1.6.crate) = d8c93436c21e4698bacadf42917db28b23017027a4deccb35dbe47a7e7840123 +SIZE (rust/crates/clap-3.1.6.crate) = 202211 SHA256 (rust/crates/cloudabi-0.0.3.crate) = ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f SIZE (rust/crates/cloudabi-0.0.3.crate) = 22156 SHA256 (rust/crates/getrandom-0.1.15.crate) = fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6 SIZE (rust/crates/getrandom-0.1.15.crate) = 24786 SHA256 (rust/crates/hashbrown-0.9.1.crate) = d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04 SIZE (rust/crates/hashbrown-0.9.1.crate) = 77734 -SHA256 (rust/crates/heck-0.3.1.crate) = 20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205 -SIZE (rust/crates/heck-0.3.1.crate) = 54666 +SHA256 (rust/crates/heck-0.4.0.crate) = 2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9 +SIZE (rust/crates/heck-0.4.0.crate) = 11161 SHA256 (rust/crates/hermit-abi-0.1.16.crate) = 4c30f6d0bc6b00693347368a67d41b58f2fb851215ff1da49e90fe2c5c667151 SIZE (rust/crates/hermit-abi-0.1.16.crate) = 9889 SHA256 
(rust/crates/indexmap-1.6.0.crate) = 55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2 @@ -35,6 +33,10 @@ SHA256 (rust/crates/lock_api-0.3.4.crate) = c4da24a77a3d8a6d4862d95f72e6fdb9c09a SIZE (rust/crates/lock_api-0.3.4.crate) = 18750 SHA256 (rust/crates/log-0.4.11.crate) = 4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b SIZE (rust/crates/log-0.4.11.crate) = 36276 +SHA256 (rust/crates/memchr-2.4.1.crate) = 308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a +SIZE (rust/crates/memchr-2.4.1.crate) = 64977 +SHA256 (rust/crates/os_str_bytes-6.0.0.crate) = 8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64 +SIZE (rust/crates/os_str_bytes-6.0.0.crate) = 21046 SHA256 (rust/crates/parking_lot-0.10.2.crate) = d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e SIZE (rust/crates/parking_lot-0.10.2.crate) = 39536 SHA256 (rust/crates/parking_lot_core-0.7.2.crate) = d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3 @@ -73,29 +75,27 @@ SHA256 (rust/crates/serial_test_derive-0.5.0.crate) = 65f59259be9fc1bf677d06cc14 SIZE (rust/crates/serial_test_derive-0.5.0.crate) = 2899 SHA256 (rust/crates/smallvec-1.4.2.crate) = fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252 SIZE (rust/crates/smallvec-1.4.2.crate) = 26109 -SHA256 (rust/crates/strsim-0.8.0.crate) = 8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a -SIZE (rust/crates/strsim-0.8.0.crate) = 9309 +SHA256 (rust/crates/strsim-0.10.0.crate) = 73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623 +SIZE (rust/crates/strsim-0.10.0.crate) = 11355 SHA256 (rust/crates/syn-1.0.41.crate) = 6690e3e9f692504b941dc6c3b188fd28df054f7fb8469ab40680df52fdcc842b SIZE (rust/crates/syn-1.0.41.crate) = 224979 SHA256 (rust/crates/tempfile-3.1.0.crate) = 7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9 SIZE (rust/crates/tempfile-3.1.0.crate) = 25823 -SHA256 (rust/crates/textwrap-0.11.0.crate) = 
d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060 -SIZE (rust/crates/textwrap-0.11.0.crate) = 17322 +SHA256 (rust/crates/termcolor-1.1.3.crate) = bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755 +SIZE (rust/crates/termcolor-1.1.3.crate) = 17242 +SHA256 (rust/crates/textwrap-0.15.0.crate) = b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb +SIZE (rust/crates/textwrap-0.15.0.crate) = 52998 SHA256 (rust/crates/toml-0.5.6.crate) = ffc92d160b1eef40665be3a05630d003936a3bc7da7421277846c2613e92c71a SIZE (rust/crates/toml-0.5.6.crate) = 54341 -SHA256 (rust/crates/unicode-segmentation-1.6.0.crate) = e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0 -SIZE (rust/crates/unicode-segmentation-1.6.0.crate) = 90703 -SHA256 (rust/crates/unicode-width-0.1.8.crate) = 9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3 -SIZE (rust/crates/unicode-width-0.1.8.crate) = 16732 SHA256 (rust/crates/unicode-xid-0.2.1.crate) = f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564 SIZE (rust/crates/unicode-xid-0.2.1.crate) = 14392 -SHA256 (rust/crates/vec_map-0.8.2.crate) = f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191 -SIZE (rust/crates/vec_map-0.8.2.crate) = 14466 SHA256 (rust/crates/wasi-0.9.0+wasi-snapshot-preview1.crate) = cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519 SIZE (rust/crates/wasi-0.9.0+wasi-snapshot-preview1.crate) = 31521 SHA256 (rust/crates/winapi-0.3.9.crate) = 5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419 SIZE (rust/crates/winapi-0.3.9.crate) = 1200382 SHA256 (rust/crates/winapi-i686-pc-windows-gnu-0.4.0.crate) = ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6 SIZE (rust/crates/winapi-i686-pc-windows-gnu-0.4.0.crate) = 2918815 +SHA256 (rust/crates/winapi-util-0.1.5.crate) = 70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178 +SIZE (rust/crates/winapi-util-0.1.5.crate) = 10164 SHA256 
(rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.crate) = 712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f SIZE (rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.crate) = 2947998 diff --git a/devel/shiboken2/Makefile b/devel/shiboken2/Makefile index 63de66c748b..6f1817cbc14 100644 --- a/devel/shiboken2/Makefile +++ b/devel/shiboken2/Makefile @@ -1,6 +1,6 @@ PORTNAME= shiboken2 DISTVERSION= 5.15.2 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= devel MASTER_SITES= QT/official_releases/QtForPython/shiboken2/PySide2-${DISTVERSION}-src PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/devel/smooth/Makefile b/devel/smooth/Makefile index 7a12d82e001..c3328897429 100644 --- a/devel/smooth/Makefile +++ b/devel/smooth/Makefile @@ -2,6 +2,7 @@ PORTNAME= smooth PORTVERSION= 0.9.9 +PORTREVISION= 1 CATEGORIES= devel MASTER_SITES= SF diff --git a/devel/sonarqube-community/Makefile b/devel/sonarqube-community/Makefile index bf860c27af1..916a6c07887 100644 --- a/devel/sonarqube-community/Makefile +++ b/devel/sonarqube-community/Makefile @@ -1,6 +1,6 @@ PORTNAME= sonarqube DISTVERSION= 9.3.0.51899 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= devel security www java MASTER_SITES= https://binaries.sonarsource.com/Distribution/sonarqube/:m_core \ https://binaries.sonarsource.com/Distribution/sonar-csharp-plugin/:m_csharp \ @@ -52,10 +52,10 @@ DATA_DIR= ${DBBASE_DIR}/data TEMP_DIR= ${DBBASE_DIR}/temp SONARCSHARP_VER= 8.36.1.44192 -SONARJAVA_VER= 7.9.0.28969 +SONARJAVA_VER= 7.10.0.29108 SONARJAVASCRIPT_VER= 9.0.0.17505 SONARPHP_VER= 3.23.0.8726 -SONARPYTHON_VER= 3.10.0.9380 +SONARPYTHON_VER= 3.11.0.9522 SONARVBNET_VER= ${SONARCSHARP_VER} USERS= sonarqube diff --git a/devel/sonarqube-community/distinfo b/devel/sonarqube-community/distinfo index 775227cb765..87d9a2e4bd4 100644 --- a/devel/sonarqube-community/distinfo +++ b/devel/sonarqube-community/distinfo @@ -1,15 +1,15 @@ -TIMESTAMP = 1647509071 +TIMESTAMP = 1648294960 SHA256 (sonarqube-9.3.0.51899.zip) = 
74108676ed881e5a10ad53b42bc8b343e868be37b4d36705b447dc80ed9f2c1c SIZE (sonarqube-9.3.0.51899.zip) = 276912542 SHA256 (sonar-csharp-plugin-8.36.1.44192.jar) = 0807d9bc56e0ce51ae82322694b5cf9607b9f295bf7fcf215712ee602f1bba89 SIZE (sonar-csharp-plugin-8.36.1.44192.jar) = 5373080 -SHA256 (sonar-java-plugin-7.9.0.28969.jar) = 19b9cd6c16c1087119d04a59b3740a6a630842a2b184c9745a46bd6624156dc0 -SIZE (sonar-java-plugin-7.9.0.28969.jar) = 17530082 +SHA256 (sonar-java-plugin-7.10.0.29108.jar) = ead4134a23c8069998e39963843119124f16ee910cb4d5fef0d3c6e3242caac9 +SIZE (sonar-java-plugin-7.10.0.29108.jar) = 17602962 SHA256 (sonar-javascript-plugin-9.0.0.17505.jar) = 9cf5e7dce566cdd4732ddf606ae61153f55043867c340dceb4893039e7e14d80 SIZE (sonar-javascript-plugin-9.0.0.17505.jar) = 22047276 SHA256 (sonar-php-plugin-3.23.0.8726.jar) = f182f62a01c82032fe5d4cc9697af88db8ec70d7d812d854cede842904fc96d2 SIZE (sonar-php-plugin-3.23.0.8726.jar) = 5573894 -SHA256 (sonar-python-plugin-3.10.0.9380.jar) = 7ef015e78fa22cff1d316e538440f21f99fb4d7bd41971d7c99eccf89ccbd393 -SIZE (sonar-python-plugin-3.10.0.9380.jar) = 7613172 +SHA256 (sonar-python-plugin-3.11.0.9522.jar) = 3f4e34a4fe1779903c7cc78b56c4a85aeb1893d108f0c17dd043653d457207d3 +SIZE (sonar-python-plugin-3.11.0.9522.jar) = 7694870 SHA256 (sonar-vbnet-plugin-8.36.1.44192.jar) = e3036903293b217dfbe07d5d1d8a7bc7a9a7e99c9e821f6f1b5061c2e9e85d24 SIZE (sonar-vbnet-plugin-8.36.1.44192.jar) = 4203896 diff --git a/devel/sope/Makefile b/devel/sope/Makefile index 0b01caf4fc2..a99456d5268 100644 --- a/devel/sope/Makefile +++ b/devel/sope/Makefile @@ -1,5 +1,6 @@ PORTNAME= sope PORTVERSION= 5.5.1 +PORTREVISION= 1 CATEGORIES= devel gnustep MASTER_SITES= http://www.sogo.nu/files/downloads/SOGo/Sources/ DISTNAME= SOPE-${PORTVERSION} diff --git a/devel/sope2/Makefile b/devel/sope2/Makefile index 9dfd4391d61..faccca28dbb 100644 --- a/devel/sope2/Makefile +++ b/devel/sope2/Makefile @@ -2,7 +2,7 @@ PORTNAME= sope2 PORTVERSION= 2.4.1 -PORTREVISION= 1 
+PORTREVISION= 2 CATEGORIES= devel gnustep MASTER_SITES= http://www.sogo.nu/files/downloads/SOGo/Sources/ DISTNAME= SOPE-${PORTVERSION} diff --git a/devel/tclxml/Makefile b/devel/tclxml/Makefile index 86276132197..e2a692f6135 100644 --- a/devel/tclxml/Makefile +++ b/devel/tclxml/Makefile @@ -2,7 +2,7 @@ PORTNAME= tclxml PORTVERSION= 3.3 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= devel tcl MASTER_SITES= LOCAL/bf SF/tclxml/TclXML/${PORTVERSION} diff --git a/devel/ticcutils/Makefile b/devel/ticcutils/Makefile index 3f2d591f33d..0010bf0f3b1 100644 --- a/devel/ticcutils/Makefile +++ b/devel/ticcutils/Makefile @@ -1,6 +1,7 @@ PORTNAME= ticcutils DISTVERSIONPREFIX= v DISTVERSION= 0.28 +PORTREVISION= 1 CATEGORIES= devel textproc MAINTAINER= yuri@FreeBSD.org diff --git a/devel/umbrello/Makefile b/devel/umbrello/Makefile index 3df834f89c1..706eae31969 100644 --- a/devel/umbrello/Makefile +++ b/devel/umbrello/Makefile @@ -1,5 +1,6 @@ PORTNAME= umbrello DISTVERSION= ${KDE_APPLICATIONS_VERSION} +PORTREVISION= 1 CATEGORIES= devel kde kde-applications MAINTAINER= kde@FreeBSD.org diff --git a/devel/universal-ctags/Makefile b/devel/universal-ctags/Makefile index 11f4cbacda4..4599d76a0ab 100644 --- a/devel/universal-ctags/Makefile +++ b/devel/universal-ctags/Makefile @@ -2,6 +2,7 @@ PORTNAME= universal-ctags PORTVERSION= p5.9.20220306.0 +PORTREVISION= 1 CATEGORIES= devel MAINTAINER= dereks@lifeofadishwasher.com diff --git a/devel/wf-config/Makefile b/devel/wf-config/Makefile index 88ea4c39931..fe8365504dc 100644 --- a/devel/wf-config/Makefile +++ b/devel/wf-config/Makefile @@ -1,6 +1,7 @@ PORTNAME= wf-config DISTVERSIONPREFIX= v DISTVERSION= 0.7.1 +PORTREVISION= 1 CATEGORIES= devel MAINTAINER= jbeich@FreeBSD.org diff --git a/devel/xdg-user-dirs/Makefile b/devel/xdg-user-dirs/Makefile index 1e8d23af474..4ccc1fcfae7 100644 --- a/devel/xdg-user-dirs/Makefile +++ b/devel/xdg-user-dirs/Makefile @@ -1,5 +1,6 @@ PORTNAME= xdg-user-dirs DISTVERSION= 0.17 +PORTREVISION= 1 CATEGORIES= 
devel MASTER_SITES= http://user-dirs.freedesktop.org/releases/ diff --git a/devel/z88dk/Makefile b/devel/z88dk/Makefile index d056ab26cac..4b6463db5b8 100644 --- a/devel/z88dk/Makefile +++ b/devel/z88dk/Makefile @@ -2,7 +2,7 @@ PORTNAME= z88dk PORTVERSION= 2.1 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel MASTER_SITES= SF/${PORTNAME}/${PORTVERSION} DISTNAME= ${PORTNAME}-src-${PORTVERSION} diff --git a/devel/zapcc/Makefile b/devel/zapcc/Makefile index c15ad42da80..c8b18d5698b 100644 --- a/devel/zapcc/Makefile +++ b/devel/zapcc/Makefile @@ -1,6 +1,6 @@ PORTNAME= zapcc PORTVERSION= g20180622 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= devel MAINTAINER= freebsd@sysctl.cz diff --git a/dns/bind9-devel/Makefile b/dns/bind9-devel/Makefile index fea2b2dcee5..1c510c890f7 100644 --- a/dns/bind9-devel/Makefile +++ b/dns/bind9-devel/Makefile @@ -49,13 +49,13 @@ RUN_DEPENDS= bind-tools>0:dns/bind-tools # XXX: remove tar:bz2 USES= autoreconf compiler:c11 cpe libedit libtool pkgconfig ssl tar:bz2 # ISC releases things like 9.8.0-P1, which our versioning doesn't like -ISCVERSION= 9.17.22a0.2022.02.09 +ISCVERSION= 9.17.22a0.2022.03.25 # XXX: Remove gitlab USE_GITLAB= yes GL_SITE= https://gitlab.isc.org GL_ACCOUNT= isc-projects GL_PROJECT= bind9 -GL_COMMIT= 59c3b17ad0b7590350b516eff62abde6a1f4382b +GL_COMMIT= 23cb022247e414bb99d901ed5de0f8f0bc9b9b90 CPE_VENDOR= isc CPE_VERSION= ${ISCVERSION:C/-.*//} @@ -126,7 +126,7 @@ TCP_FASTOPEN_DESC= RFC 7413 support DOCS_ALL_TARGET= all html DOCS_BUILD_DEPENDS= sphinx-build:textproc/py-sphinx \ ${PYTHON_PKGNAMEPREFIX}sphinx_rtd_theme>0:textproc/py-sphinx_rtd_theme@${PY_FLAVOR} -DOCS_USES= python:env +DOCS_USES= gmake python:env DNSTAP_CONFIGURE_ENABLE= dnstap DNSTAP_LIB_DEPENDS= libfstrm.so:devel/fstrm \ @@ -168,6 +168,7 @@ LMDB_CONFIGURE_WITH= lmdb=${LOCALBASE} LMDB_LIB_DEPENDS= liblmdb.so:databases/lmdb MANPAGES_BUILD_DEPENDS= sphinx-build:textproc/py-sphinx +MANPAGES_USES= gmake OVERRIDECACHE_EXTRA_PATCHES= 
${FILESDIR}/extrapatch-bind-min-override-ttl diff --git a/dns/bind9-devel/distinfo b/dns/bind9-devel/distinfo index e72f4dee7f3..e1d0a8a9006 100644 --- a/dns/bind9-devel/distinfo +++ b/dns/bind9-devel/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1644501345 -SHA256 (isc-projects-bind9-59c3b17ad0b7590350b516eff62abde6a1f4382b_GL0.tar.gz) = c6c61f713aeb06dba96b7bb5858564b3f5624cb2e60aec59dae35b3ef2d72bfe -SIZE (isc-projects-bind9-59c3b17ad0b7590350b516eff62abde6a1f4382b_GL0.tar.gz) = 6153436 +TIMESTAMP = 1648457921 +SHA256 (isc-projects-bind9-23cb022247e414bb99d901ed5de0f8f0bc9b9b90_GL0.tar.gz) = 524add3da7991b7987e90d5d6450db0c41dfc8cab087d1c600371caaa26b0f0a +SIZE (isc-projects-bind9-23cb022247e414bb99d901ed5de0f8f0bc9b9b90_GL0.tar.gz) = 6155737 diff --git a/dns/bind9-devel/files/extrapatch-bind-min-override-ttl b/dns/bind9-devel/files/extrapatch-bind-min-override-ttl index 5cc994079bc..ebb596e99f5 100644 --- a/dns/bind9-devel/files/extrapatch-bind-min-override-ttl +++ b/dns/bind9-devel/files/extrapatch-bind-min-override-ttl @@ -1,8 +1,8 @@ Add the override-cache-ttl feature. ---- bin/named/config.c.orig 2022-02-09 16:40:14 UTC +--- bin/named/config.c.orig 2022-03-25 15:13:28 UTC +++ bin/named/config.c -@@ -172,6 +172,7 @@ options {\n\ +@@ -171,6 +171,7 @@ options {\n\ notify-source *;\n\ notify-source-v6 *;\n\ nsec3-test-zone no;\n\ @@ -10,9 +10,9 @@ Add the override-cache-ttl feature. parental-source *;\n\ parental-source-v6 *;\n\ provide-ixfr true;\n\ ---- bin/named/server.c.orig 2022-02-09 16:40:14 UTC +--- bin/named/server.c.orig 2022-03-25 15:13:28 UTC +++ bin/named/server.c -@@ -4496,6 +4496,11 @@ configure_view(dns_view_t *view, dns_viewlist_t *viewl +@@ -4484,6 +4484,11 @@ configure_view(dns_view_t *view, dns_viewlist_t *viewl } obj = NULL; @@ -24,7 +24,7 @@ Add the override-cache-ttl feature. 
result = named_config_get(maps, "max-cache-ttl", &obj); INSIST(result == ISC_R_SUCCESS); view->maxcachettl = cfg_obj_asduration(obj); ---- lib/dns/include/dns/view.h.orig 2022-02-09 16:40:14 UTC +--- lib/dns/include/dns/view.h.orig 2022-03-25 15:13:28 UTC +++ lib/dns/include/dns/view.h @@ -155,6 +155,7 @@ struct dns_view { bool requestnsid; @@ -34,9 +34,9 @@ Add the override-cache-ttl feature. dns_ttl_t maxncachettl; dns_ttl_t mincachettl; dns_ttl_t minncachettl; ---- lib/dns/resolver.c.orig 2022-02-09 16:40:14 UTC +--- lib/dns/resolver.c.orig 2022-03-25 15:13:28 UTC +++ lib/dns/resolver.c -@@ -6119,6 +6119,12 @@ cache_name(fetchctx_t *fctx, dns_name_t *name, dns_mes +@@ -6126,6 +6126,12 @@ cache_name(fetchctx_t *fctx, dns_name_t *name, dns_mes } /* @@ -49,9 +49,9 @@ Add the override-cache-ttl feature. * Enforce the configure maximum cache TTL. */ if (rdataset->ttl > res->view->maxcachettl) { ---- lib/isccfg/namedconf.c.orig 2022-02-09 16:40:14 UTC +--- lib/isccfg/namedconf.c.orig 2022-03-25 15:13:28 UTC +++ lib/isccfg/namedconf.c -@@ -2086,6 +2086,7 @@ static cfg_clausedef_t view_clauses[] = { +@@ -2092,6 +2092,7 @@ static cfg_clausedef_t view_clauses[] = { #endif /* ifdef HAVE_LMDB */ { "max-acache-size", NULL, CFG_CLAUSEFLAG_ANCIENT }, { "max-cache-size", &cfg_type_sizeorpercent, 0 }, diff --git a/dns/bind9-devel/files/patch-configure.ac b/dns/bind9-devel/files/patch-configure.ac index 00ad7146080..ecb07886e57 100644 --- a/dns/bind9-devel/files/patch-configure.ac +++ b/dns/bind9-devel/files/patch-configure.ac @@ -6,8 +6,8 @@ automake has warnings, it is ok here. 
AC_CANONICAL_TARGET AC_CONFIG_SRCDIR([bin/named/main.c]) --AM_INIT_AUTOMAKE([1.9 tar-pax foreign subdir-objects dist-xz no-dist-gzip -Wall -Werror]) -+AM_INIT_AUTOMAKE([1.9 tar-pax foreign subdir-objects dist-xz no-dist-gzip -Wall]) +-AM_INIT_AUTOMAKE([1.14 tar-pax foreign subdir-objects dist-xz no-dist-gzip -Wall -Werror]) ++AM_INIT_AUTOMAKE([1.14 tar-pax foreign subdir-objects dist-xz no-dist-gzip -Wall]) AM_SILENT_RULES([yes]) AM_EXTRA_RECURSIVE_TARGETS([test unit doc]) diff --git a/dns/bind9-devel/pkg-plist b/dns/bind9-devel/pkg-plist index 90bb575d5d4..1eddc578ecc 100644 --- a/dns/bind9-devel/pkg-plist +++ b/dns/bind9-devel/pkg-plist @@ -97,7 +97,6 @@ include/dns/soa.h include/dns/ssu.h include/dns/stats.h include/dns/time.h -include/dns/timer.h include/dns/tkey.h include/dns/transport.h include/dns/tsec.h @@ -149,7 +148,6 @@ include/isc/hash.h include/isc/heap.h include/isc/hex.h include/isc/hmac.h -include/isc/hp.h include/isc/ht.h include/isc/httpd.h include/isc/interfaceiter.h @@ -178,7 +176,6 @@ include/isc/parseint.h include/isc/pool.h include/isc/portset.h include/isc/print.h -include/isc/queue.h include/isc/quota.h include/isc/radix.h include/isc/random.h diff --git a/dns/bind911/Makefile b/dns/bind911/Makefile index 9b0bd71141f..d6ca618d254 100644 --- a/dns/bind911/Makefile +++ b/dns/bind911/Makefile @@ -2,7 +2,7 @@ PORTNAME= bind PORTVERSION= ${ISCVERSION:S/-P/P/:S/b/.b/:S/a/.a/:S/rc/.rc/} -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= dns net MASTER_SITES= ISC/bind9/${ISCVERSION} PKGNAMESUFFIX= 911 diff --git a/dns/bind918/Makefile b/dns/bind918/Makefile index 3778ae83813..b3e042e2481 100644 --- a/dns/bind918/Makefile +++ b/dns/bind918/Makefile @@ -112,7 +112,7 @@ TCP_FASTOPEN_DESC= RFC 7413 support DOCS_ALL_TARGET= all html DOCS_BUILD_DEPENDS= sphinx-build:textproc/py-sphinx \ ${PYTHON_PKGNAMEPREFIX}sphinx_rtd_theme>0:textproc/py-sphinx_rtd_theme@${PY_FLAVOR} -DOCS_USES= python:env +DOCS_USES= gmake python:env DNSTAP_CONFIGURE_ENABLE= dnstap 
DNSTAP_LIB_DEPENDS= libfstrm.so:devel/fstrm \ @@ -154,6 +154,7 @@ LMDB_CONFIGURE_WITH= lmdb=${LOCALBASE} LMDB_LIB_DEPENDS= liblmdb.so:databases/lmdb MANPAGES_BUILD_DEPENDS= sphinx-build:textproc/py-sphinx +MANPAGES_USES= gmake OVERRIDECACHE_EXTRA_PATCHES= ${FILESDIR}/extrapatch-bind-min-override-ttl diff --git a/dns/bind918/pkg-plist b/dns/bind918/pkg-plist index 25122ba923b..4698d893698 100644 --- a/dns/bind918/pkg-plist +++ b/dns/bind918/pkg-plist @@ -96,7 +96,6 @@ include/dns/soa.h include/dns/ssu.h include/dns/stats.h include/dns/time.h -include/dns/timer.h include/dns/tkey.h include/dns/transport.h include/dns/tsec.h @@ -245,19 +244,19 @@ include/ns/xfrout.h lib/bind/filter-a.so lib/bind/filter-aaaa.so lib/libbind9.so -lib/libbind9-9.18.0.so +lib/libbind9-9.18.1.so lib/libdns.so -lib/libdns-9.18.0.so +lib/libdns-9.18.1.so lib/libirs.so -lib/libirs-9.18.0.so +lib/libirs-9.18.1.so lib/libisc.so -lib/libisc-9.18.0.so +lib/libisc-9.18.1.so lib/libisccc.so -lib/libisccc-9.18.0.so +lib/libisccc-9.18.1.so lib/libisccfg.so -lib/libisccfg-9.18.0.so +lib/libisccfg-9.18.1.so lib/libns.so -lib/libns-9.18.0.so +lib/libns-9.18.1.so @comment man/man1/arpaname.1.gz @comment man/man1/delv.1.gz @comment man/man1/dig.1.gz diff --git a/dns/dnscap/Makefile b/dns/dnscap/Makefile index 76751d4557b..6204803d63d 100644 --- a/dns/dnscap/Makefile +++ b/dns/dnscap/Makefile @@ -1,8 +1,7 @@ # Created by: Edwin Groothuis PORTNAME= dnscap -PORTVERSION= 2.0.1 -PORTREVISION= 1 +PORTVERSION= 2.0.2 PORTEPOCH= 1 CATEGORIES= dns MASTER_SITES= https://www.dns-oarc.net/files/dnscap/ \ diff --git a/dns/dnscap/distinfo b/dns/dnscap/distinfo index afd8f7053ed..879478d897a 100644 --- a/dns/dnscap/distinfo +++ b/dns/dnscap/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620405347 -SHA256 (dnscap-2.0.1.tar.gz) = 1000d26ea24df06391acccd6663d98532225bf08dec738db1610616f0b480350 -SIZE (dnscap-2.0.1.tar.gz) = 732934 +TIMESTAMP = 1647264428 +SHA256 (dnscap-2.0.2.tar.gz) = 
9071edf92b07cd9e1e454fe49b3acc14fa9204d31aaa8744edb4bd60926497c1 +SIZE (dnscap-2.0.2.tar.gz) = 733381 diff --git a/dns/opendnssec2/Makefile b/dns/opendnssec2/Makefile index a3518058757..91782f3549f 100644 --- a/dns/opendnssec2/Makefile +++ b/dns/opendnssec2/Makefile @@ -2,6 +2,7 @@ PORTNAME= opendnssec DISTVERSION= 2.1.10 +PORTREVISION= 1 CATEGORIES= dns MASTER_SITES= http://dist.opendnssec.org/source/ PKGNAMESUFFIX= 2 diff --git a/dns/py-publicsuffixlist/Makefile b/dns/py-publicsuffixlist/Makefile index 86ea6f29b81..9158803f681 100644 --- a/dns/py-publicsuffixlist/Makefile +++ b/dns/py-publicsuffixlist/Makefile @@ -1,8 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= publicsuffixlist -PORTVERSION= 0.7.11 -PORTREVISION= 2 +PORTVERSION= 0.7.12 CATEGORIES= dns python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/dns/py-publicsuffixlist/distinfo b/dns/py-publicsuffixlist/distinfo index 14299405112..59125f7d9de 100644 --- a/dns/py-publicsuffixlist/distinfo +++ b/dns/py-publicsuffixlist/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641045978 -SHA256 (publicsuffixlist-0.7.11.tar.gz) = 27ab1ab44887a06a0b0f7cd92b49fc40dad46a662baa8887aec7703cf314e62b -SIZE (publicsuffixlist-0.7.11.tar.gz) = 92891 +TIMESTAMP = 1647264588 +SHA256 (publicsuffixlist-0.7.12.tar.gz) = 52952e8432cd7fcbbc6174196e55e3d6e3d89c96255fee99b9c1db6012db797a +SIZE (publicsuffixlist-0.7.12.tar.gz) = 93966 diff --git a/editors/abiword/Makefile b/editors/abiword/Makefile index 11d7bea7b14..5426dfc105d 100644 --- a/editors/abiword/Makefile +++ b/editors/abiword/Makefile @@ -2,6 +2,7 @@ PORTNAME= abiword PORTVERSION= 3.0.5 +PORTREVISION= 1 CATEGORIES= editors MASTER_SITES= http://www.abisource.com/downloads/abiword/${PORTVERSION}/source/ DIST_SUBDIR= AbiWord diff --git a/editors/diamond/Makefile b/editors/diamond/Makefile index 460e476cae8..c38e2a60e2a 100644 --- a/editors/diamond/Makefile +++ b/editors/diamond/Makefile @@ -1,5 +1,6 @@ PORTNAME= diamond DISTVERSION= 1.3.7 +PORTREVISION= 1 
CATEGORIES= editors MASTER_SITES= https://download.copperspice.com/${PORTNAME}/source/ PKGNAMESUFFIX= -cs diff --git a/editors/elementary-code/Makefile b/editors/elementary-code/Makefile index f1f5c8ef579..d9514fb91a8 100644 --- a/editors/elementary-code/Makefile +++ b/editors/elementary-code/Makefile @@ -1,6 +1,6 @@ PORTNAME= elementary-code DISTVERSION= 6.0.0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= editors PATCH_SITES= https://github.com/${GH_ACCOUNT}/${GH_PROJECT}/commit/ diff --git a/editors/emacs-devel/Makefile b/editors/emacs-devel/Makefile index 375415dc87b..a12930dc1c1 100644 --- a/editors/emacs-devel/Makefile +++ b/editors/emacs-devel/Makefile @@ -2,6 +2,7 @@ PORTNAME= emacs DISTVERSION= 29.0.50.20220315 +PORTREVISION= 1 PORTEPOCH= 2 CATEGORIES= editors PKGNAMESUFFIX= -devel diff --git a/editors/gedit-plugins/Makefile b/editors/gedit-plugins/Makefile index f3d794b2b15..8f44283e0db 100644 --- a/editors/gedit-plugins/Makefile +++ b/editors/gedit-plugins/Makefile @@ -2,6 +2,7 @@ PORTNAME= gedit-plugins PORTVERSION= 41.0 +PORTREVISION= 1 CATEGORIES= editors gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/editors/gedit/Makefile b/editors/gedit/Makefile index 66101b59501..4645620b843 100644 --- a/editors/gedit/Makefile +++ b/editors/gedit/Makefile @@ -2,7 +2,7 @@ PORTNAME= gedit PORTVERSION= 41.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= editors gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/editors/ghostwriter/Makefile b/editors/ghostwriter/Makefile index 5f7c7e34edf..8e2ace8d8aa 100644 --- a/editors/ghostwriter/Makefile +++ b/editors/ghostwriter/Makefile @@ -1,9 +1,8 @@ PORTNAME= ghostwriter -DISTVERSIONPREFIX= v -DISTVERSION= 1.8.1 +DISTVERSION= 2.1.2 CATEGORIES= editors -MAINTAINER= ports@FreeBSD.org +MAINTAINER= madpilot@FreeBSD.org COMMENT= Distraction-free Markdown editor LICENSE= GPLv3+ @@ -20,4 +19,10 @@ USE_QT= 
core gui svg webchannel webengine widgets buildtools_build \ # cf. PR 224488 LDFLAGS+= -Wl,--as-needed +post-configure: + @${REINPLACE_CMD} -e 's|^\(build/release/qrc_QtAwesomeFree.cpp:[^\\]*\)\\|\1|' \ + -e '/3rdparty\/QtAwesome\/fonts\/Font/d' \ + -e '/\/lib\/qt5\/bin\/rcc \\/d' \ + ${WRKSRC}/Makefile + .include diff --git a/editors/ghostwriter/distinfo b/editors/ghostwriter/distinfo index ebe99a81b28..970ff8bf6a2 100644 --- a/editors/ghostwriter/distinfo +++ b/editors/ghostwriter/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1582434096 -SHA256 (wereturtle-ghostwriter-v1.8.1_GH0.tar.gz) = 2b73fed6a95ebcd45333fe4b12acb491eb4171fca8a11eea1af9bbdc73ed4b69 -SIZE (wereturtle-ghostwriter-v1.8.1_GH0.tar.gz) = 1220000 +TIMESTAMP = 1648229424 +SHA256 (wereturtle-ghostwriter-2.1.2_GH0.tar.gz) = 65aa523a9c3d4f4c68f2cb6f5ab600cf24c98b4f0673a622d757a8e28eab120f +SIZE (wereturtle-ghostwriter-2.1.2_GH0.tar.gz) = 7722873 diff --git a/editors/ghostwriter/files/patch-src_MarkdownEditor.cpp b/editors/ghostwriter/files/patch-src_MarkdownEditor.cpp deleted file mode 100644 index 89c3e711485..00000000000 --- a/editors/ghostwriter/files/patch-src_MarkdownEditor.cpp +++ /dev/null @@ -1,10 +0,0 @@ ---- src/MarkdownEditor.cpp.orig 2020-05-21 07:40:36 UTC -+++ src/MarkdownEditor.cpp -@@ -34,6 +34,7 @@ - #include - #include - #include -+#include - #include - #include - diff --git a/editors/ghostwriter/pkg-plist b/editors/ghostwriter/pkg-plist index 199d1c40018..15bcc8f0704 100644 --- a/editors/ghostwriter/pkg-plist +++ b/editors/ghostwriter/pkg-plist @@ -1,7 +1,7 @@ bin/ghostwriter -share/appdata/ghostwriter.appdata.xml share/applications/ghostwriter.desktop %%DATADIR%%/translations/ghostwriter_ar.qm +%%DATADIR%%/translations/ghostwriter_ca.qm %%DATADIR%%/translations/ghostwriter_cs.qm %%DATADIR%%/translations/ghostwriter_de.qm %%DATADIR%%/translations/ghostwriter_en.qm @@ -9,10 +9,12 @@ share/applications/ghostwriter.desktop %%DATADIR%%/translations/ghostwriter_fr.qm 
%%DATADIR%%/translations/ghostwriter_it.qm %%DATADIR%%/translations/ghostwriter_ja.qm +%%DATADIR%%/translations/ghostwriter_nb_NO.qm %%DATADIR%%/translations/ghostwriter_nl.qm %%DATADIR%%/translations/ghostwriter_pl.qm %%DATADIR%%/translations/ghostwriter_pt_BR.qm %%DATADIR%%/translations/ghostwriter_ru.qm +%%DATADIR%%/translations/ghostwriter_sv.qm %%DATADIR%%/translations/ghostwriter_zh.qm share/icons/hicolor/128x128/apps/ghostwriter.png share/icons/hicolor/16x16/apps/ghostwriter.png @@ -24,4 +26,4 @@ share/icons/hicolor/48x48/apps/ghostwriter.png share/icons/hicolor/64x64/apps/ghostwriter.png share/icons/hicolor/scalable/apps/ghostwriter.svg share/man/man1/ghostwriter.1.gz -share/pixmaps/ghostwriter.xpm +share/metainfo/ghostwriter.appdata.xml diff --git a/editors/gnome-latex/Makefile b/editors/gnome-latex/Makefile index 35ab4d7c0b5..37d77b488ce 100644 --- a/editors/gnome-latex/Makefile +++ b/editors/gnome-latex/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-latex PORTVERSION= 3.38.0 +PORTREVISION= 1 CATEGORIES= editors MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/editors/gobby/Makefile b/editors/gobby/Makefile index 2bbb0f37a61..f8a1d884403 100644 --- a/editors/gobby/Makefile +++ b/editors/gobby/Makefile @@ -1,6 +1,6 @@ PORTNAME= gobby PORTVERSION= 0.4.13 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= editors MASTER_SITES= http://releases.0x539.de/gobby/ diff --git a/editors/gummi/Makefile b/editors/gummi/Makefile index 8f1afd055f1..3a289763f6e 100644 --- a/editors/gummi/Makefile +++ b/editors/gummi/Makefile @@ -2,7 +2,7 @@ PORTNAME= gummi PORTVERSION= 0.8.1 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= editors MASTER_SITES= https://github.com/alexandervdm/gummi/releases/download/${PORTVERSION}/ diff --git a/editors/jucipp/Makefile b/editors/jucipp/Makefile index 53ed3807f24..927af6d9fd3 100644 --- a/editors/jucipp/Makefile +++ b/editors/jucipp/Makefile @@ -3,7 +3,7 @@ PORTNAME= jucipp DISTVERSIONPREFIX= v DISTVERSION= 1.7.1 -PORTREVISION= 1 +PORTREVISION= 2 
CATEGORIES= editors MAINTAINER= info@babaei.net diff --git a/editors/libreoffice/Makefile b/editors/libreoffice/Makefile index bd0562f45f8..4cadb72936b 100644 --- a/editors/libreoffice/Makefile +++ b/editors/libreoffice/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 3 +PORTREVISION= 4 .include "${.CURDIR}/Makefile.common" diff --git a/editors/libreoffice6/Makefile b/editors/libreoffice6/Makefile index d850a94ff07..d1475911e42 100644 --- a/editors/libreoffice6/Makefile +++ b/editors/libreoffice6/Makefile @@ -1,5 +1,5 @@ -PORTREVISION= 20 +PORTREVISION= 21 .include "${.CURDIR}/Makefile.common" diff --git a/editors/marker/Makefile b/editors/marker/Makefile index af5c72ac5a1..6f26f29f05d 100644 --- a/editors/marker/Makefile +++ b/editors/marker/Makefile @@ -1,5 +1,6 @@ PORTNAME= marker PORTVERSION= 2020.04.04.2 +PORTREVISION= 1 CATEGORIES= editors MAINTAINER= tagattie@FreeBSD.org diff --git a/editors/morla/Makefile b/editors/morla/Makefile index 224716ae5ff..8fc7fbef338 100644 --- a/editors/morla/Makefile +++ b/editors/morla/Makefile @@ -2,7 +2,7 @@ PORTNAME= morla PORTVERSION= 0.16.1 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= editors textproc MASTER_SITES= http://www.morlardf.net/src/ \ http://nivi.interfree.it/distfiles/${PORTNAME}/${PORTVERSION}/ diff --git a/editors/mousepad/Makefile b/editors/mousepad/Makefile index 1d744e08746..5836601c19e 100644 --- a/editors/mousepad/Makefile +++ b/editors/mousepad/Makefile @@ -2,6 +2,7 @@ PORTNAME= mousepad PORTVERSION= 0.5.8 +PORTREVISION= 1 CATEGORIES= editors xfce MASTER_SITES= XFCE/apps DIST_SUBDIR= xfce4 diff --git a/editors/openoffice-4/Makefile b/editors/openoffice-4/Makefile index c1dfd16585d..7b017844174 100644 --- a/editors/openoffice-4/Makefile +++ b/editors/openoffice-4/Makefile @@ -2,7 +2,7 @@ PORTNAME= apache-openoffice PORTVERSION= ${AOOVERSION} -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= editors java MASTER_SITES= https://dlcdn.apache.org/openoffice/${PORTVERSION}/source/ \ 
https://archive.apache.org/dist/openoffice/${PORTVERSION}/source/ \ diff --git a/editors/openoffice-devel/Makefile b/editors/openoffice-devel/Makefile index 04f83a5bc9a..6240842c0e5 100644 --- a/editors/openoffice-devel/Makefile +++ b/editors/openoffice-devel/Makefile @@ -2,7 +2,7 @@ PORTNAME= apache-openoffice PORTVERSION= ${AOOVERSION1}.${AOOVERSION2}.${TIMESTAMP} -PORTREVISION= 2 +PORTREVISION= 3 PORTEPOCH= 4 CATEGORIES= editors java MASTER_SITES= https://dist.apache.org/repos/dist/dev/openoffice/${AOOVERSION}-${AOORC}-${TIMESTAMP}/source/ \ diff --git a/editors/pluma-plugins/Makefile b/editors/pluma-plugins/Makefile index bbec58caed5..5f167bb7c0b 100644 --- a/editors/pluma-plugins/Makefile +++ b/editors/pluma-plugins/Makefile @@ -2,7 +2,7 @@ PORTNAME= pluma-plugins PORTVERSION= 1.26.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= editors mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/editors/pluma/Makefile b/editors/pluma/Makefile index 3d13e85021d..8664f31ed25 100644 --- a/editors/pluma/Makefile +++ b/editors/pluma/Makefile @@ -2,7 +2,7 @@ PORTNAME= pluma PORTVERSION= 1.26.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= editors mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/editors/quilter/Makefile b/editors/quilter/Makefile index e28ee638f00..639b972c230 100644 --- a/editors/quilter/Makefile +++ b/editors/quilter/Makefile @@ -1,6 +1,6 @@ PORTNAME= quilter DISTVERSION= 2.2.3 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= editors MAINTAINER= yuri@FreeBSD.org diff --git a/editors/setzer/Makefile b/editors/setzer/Makefile index 10c121142a6..4abd26a8f68 100644 --- a/editors/setzer/Makefile +++ b/editors/setzer/Makefile @@ -1,7 +1,7 @@ PORTNAME= setzer DISTVERSIONPREFIX=v DISTVERSION= 0.4.1 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= editors MAINTAINER= greg@unrelenting.technology diff --git a/editors/vscode/Makefile b/editors/vscode/Makefile index 397fec7e90a..b1bef164d16 100644 --- a/editors/vscode/Makefile +++ b/editors/vscode/Makefile @@ -1,5 
+1,6 @@ PORTNAME= vscode DISTVERSION= 1.64.2 +PORTREVISION= 1 CATEGORIES= editors MASTER_SITES= https://registry.npmjs.org/esbuild-freebsd-64/-/:esbuild_binary \ https://nodejs.org/dist/v${NODE_VER}/:node_headers \ diff --git a/editors/xed/Makefile b/editors/xed/Makefile index a81b5056a42..5cd94a8795e 100644 --- a/editors/xed/Makefile +++ b/editors/xed/Makefile @@ -2,6 +2,7 @@ PORTNAME= xed PORTVERSION= 3.2.2 +PORTREVISION= 1 CATEGORIES= editors gnome DIST_SUBDIR= gnome diff --git a/editors/xmlcopyeditor/Makefile b/editors/xmlcopyeditor/Makefile index d66c7370421..6ca5661a223 100644 --- a/editors/xmlcopyeditor/Makefile +++ b/editors/xmlcopyeditor/Makefile @@ -1,5 +1,6 @@ PORTNAME= xmlcopyeditor PORTVERSION= 1.3.0.0 +PORTREVISION= 1 CATEGORIES= editors MASTER_SITES= SF/xml-copy-editor/${PORTNAME}-linux/${PORTVERSION} diff --git a/emulators/catapult/Makefile b/emulators/catapult/Makefile index 4d5eaa9a659..2f10c0d34bf 100644 --- a/emulators/catapult/Makefile +++ b/emulators/catapult/Makefile @@ -1,5 +1,6 @@ PORTNAME= catapult PORTVERSION= 17.0 +PORTREVISION= 1 CATEGORIES= emulators MASTER_SITES= https://github.com/openMSX/openMSX/releases/download/RELEASE_${PORTVERSION:S/./_/g}/ DISTNAME= openmsx-${PORTNAME}-${PORTVERSION} diff --git a/emulators/fuse/Makefile b/emulators/fuse/Makefile index 917e4904773..8099740c1a1 100644 --- a/emulators/fuse/Makefile +++ b/emulators/fuse/Makefile @@ -2,6 +2,7 @@ PORTNAME= fuse PORTVERSION= 1.6.0 +PORTREVISION= 1 CATEGORIES= emulators MASTER_SITES= SF/${PORTNAME}-emulator/${PORTNAME}/${PORTVERSION} diff --git a/emulators/higan/Makefile b/emulators/higan/Makefile index bd5fd2d0685..7939201d222 100644 --- a/emulators/higan/Makefile +++ b/emulators/higan/Makefile @@ -2,7 +2,7 @@ PORTNAME= higan PORTVERSION= 106 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= emulators games MASTER_SITES= http://download.byuu.org/ \ http://www.cyberbotx.com/higan/ diff --git a/emulators/nemu/Makefile b/emulators/nemu/Makefile index c3cda75572b..d8ce9868f72 
100644 --- a/emulators/nemu/Makefile +++ b/emulators/nemu/Makefile @@ -1,6 +1,7 @@ PORTNAME= nemu DISTVERSIONPREFIX= v DISTVERSION= 3.0.0 +PORTREVISION= 1 CATEGORIES= emulators MAINTAINER= arrowd@FreeBSD.org diff --git a/emulators/qemu-cheri/Makefile b/emulators/qemu-cheri/Makefile index 5116ddcdd8e..4aef8ec2208 100644 --- a/emulators/qemu-cheri/Makefile +++ b/emulators/qemu-cheri/Makefile @@ -1,6 +1,6 @@ PORTNAME= qemu PORTVERSION= 0.d${SNAPDATE} -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= emulators devel PKGNAMESUFFIX= -cheri diff --git a/emulators/qemu-devel/Makefile b/emulators/qemu-devel/Makefile index 58995619709..682f3aa8f3c 100644 --- a/emulators/qemu-devel/Makefile +++ b/emulators/qemu-devel/Makefile @@ -2,6 +2,7 @@ PORTNAME= qemu DISTVERSION= 6.1.0.20210928 +PORTREVISION= 1 CATEGORIES= emulators PKGNAMESUFFIX= -devel DIST_SUBDIR= qemu/${PORTVERSION} diff --git a/emulators/qemu-powernv/Makefile b/emulators/qemu-powernv/Makefile index ac66bc4ca73..3cdb675f498 100644 --- a/emulators/qemu-powernv/Makefile +++ b/emulators/qemu-powernv/Makefile @@ -1,6 +1,6 @@ PORTNAME= qemu-powernv PORTVERSION= 3.0.50 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= emulators MASTER_SITES= http://people.freebsd.org/~kbowling/distfiles/ diff --git a/emulators/qemu5/Makefile b/emulators/qemu5/Makefile index 484d0c006c0..c48dd4329e4 100644 --- a/emulators/qemu5/Makefile +++ b/emulators/qemu5/Makefile @@ -1,5 +1,6 @@ PORTNAME= qemu DISTVERSION= 5.2.0 +PORTREVISION= 1 CATEGORIES= emulators PKGNAMESUFFIX= 5 MASTER_SITES= https://download.qemu.org/ diff --git a/emulators/snes9x-gtk/Makefile b/emulators/snes9x-gtk/Makefile index e1e666522ca..50a8349938c 100644 --- a/emulators/snes9x-gtk/Makefile +++ b/emulators/snes9x-gtk/Makefile @@ -2,7 +2,7 @@ PORTNAME= snes9x PORTVERSION= 1.54.1 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= emulators PKGNAMESUFFIX= -gtk diff --git a/emulators/tiemu3/Makefile b/emulators/tiemu3/Makefile index 3327e7fc114..2201d0387f3 100644 --- 
a/emulators/tiemu3/Makefile +++ b/emulators/tiemu3/Makefile @@ -2,7 +2,7 @@ PORTNAME= tiemu3 PORTVERSION= 3.03 -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= emulators MASTER_SITES= SF/gtktiemu/tiemu-linux/TIEmu%20${DISTVERSION} DISTNAME= tiemu-${DISTVERSION} diff --git a/emulators/virtualbox-ose-legacy/Makefile b/emulators/virtualbox-ose-legacy/Makefile index ae08c1b6bc9..af10a42a9ce 100644 --- a/emulators/virtualbox-ose-legacy/Makefile +++ b/emulators/virtualbox-ose-legacy/Makefile @@ -2,7 +2,7 @@ PORTNAME= virtualbox-ose PORTVERSION= 5.2.44 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= emulators MASTER_SITES= https://download.oracle.com/virtualbox/${PORTVERSION}/ PKGNAMESUFFIX?= -legacy diff --git a/emulators/virtualbox-ose-nox11-legacy/Makefile b/emulators/virtualbox-ose-nox11-legacy/Makefile index 15c46316857..7aec5a3aceb 100644 --- a/emulators/virtualbox-ose-nox11-legacy/Makefile +++ b/emulators/virtualbox-ose-nox11-legacy/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 PKGNAMESUFFIX= -nox11-legacy OPTIONS_EXCLUDE= ALSA DBUS DEBUG GUESTADDITIONS MANUAL NLS PULSEAUDIO diff --git a/emulators/virtualbox-ose-nox11/Makefile b/emulators/virtualbox-ose-nox11/Makefile index ed9100f7f09..2987a462881 100644 --- a/emulators/virtualbox-ose-nox11/Makefile +++ b/emulators/virtualbox-ose-nox11/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 PKGNAMESUFFIX= -nox11 OPTIONS_EXCLUDE= ALSA DBUS DEBUG GUESTADDITIONS MANUAL NLS PULSEAUDIO \ diff --git a/emulators/virtualbox-ose/Makefile b/emulators/virtualbox-ose/Makefile index b1c34f71bd6..4f4b1f6ee72 100644 --- a/emulators/virtualbox-ose/Makefile +++ b/emulators/virtualbox-ose/Makefile @@ -2,7 +2,7 @@ PORTNAME= virtualbox-ose PORTVERSION= 6.1.32 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= emulators MASTER_SITES= https://download.virtualbox.org/virtualbox/${PORTVERSION}/ DISTFILES= VirtualBox-${PORTVERSION}${EXTRACT_SUFX} ${GUESTADDITIONS} diff --git a/emulators/wine-proton/Makefile b/emulators/wine-proton/Makefile index 
8e685414b0f..c322b4f65c8 100644 --- a/emulators/wine-proton/Makefile +++ b/emulators/wine-proton/Makefile @@ -1,6 +1,6 @@ PORTNAME= wine-proton DISTVERSION= 6.3-2 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= emulators MAINTAINER= iwtcex@gmail.com diff --git a/emulators/wine/Makefile b/emulators/wine/Makefile index bcc954cfc58..320efed7045 100644 --- a/emulators/wine/Makefile +++ b/emulators/wine/Makefile @@ -2,6 +2,7 @@ PORTNAME= wine DISTVERSION= 6.0.3 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= emulators MASTER_SITES= SF/${PORTNAME}/Source \ diff --git a/emulators/yuzu/Makefile b/emulators/yuzu/Makefile index 409ea57d8c3..cf3380d6024 100644 --- a/emulators/yuzu/Makefile +++ b/emulators/yuzu/Makefile @@ -1,5 +1,5 @@ PORTNAME= yuzu -PORTVERSION= s20220322 +PORTVERSION= s20220325 CATEGORIES= emulators .if make(makesum) MASTER_SITES= https://api.yuzu-emu.org/gamedb/?dummy=/:gamedb @@ -42,7 +42,7 @@ TEST_DEPENDS= catch>0:devel/catch USE_GITHUB= yes GH_ACCOUNT= yuzu-emu -GH_TAGNAME= fb4d80b16 +GH_TAGNAME= da46d924e GH_TUPLE= yuzu-emu:mbedtls:v2.16.9-115-g8c88150ca:mbedtls/externals/mbedtls \ KhronosGroup:SPIRV-Headers:1.5.4.raytracing.fixed-32-ga3fdfe8:SPIRV_Headers/externals/sirit/externals/SPIRV-Headers \ KhronosGroup:Vulkan-Headers:v1.2.202:Vulkan_Headers/externals/Vulkan-Headers \ diff --git a/emulators/yuzu/distinfo b/emulators/yuzu/distinfo index e0525f05990..fdb835b7aa5 100644 --- a/emulators/yuzu/distinfo +++ b/emulators/yuzu/distinfo @@ -1,8 +1,8 @@ -TIMESTAMP = 1648067462 +TIMESTAMP = 1648224580 SHA256 (yuzu/compatibility_list.json) = 6a73a63bb36c7070ec4314f4dfe61b244c819178e171254680aa43c2dfff187c SIZE (yuzu/compatibility_list.json) = 1255189 -SHA256 (yuzu-emu-yuzu-s20220322-fb4d80b16_GH0.tar.gz) = a35b43882d8dfc44030f0bbb4cd12a7f9341dadf75fc43ba939d2628a837c94d -SIZE (yuzu-emu-yuzu-s20220322-fb4d80b16_GH0.tar.gz) = 4637069 +SHA256 (yuzu-emu-yuzu-s20220325-da46d924e_GH0.tar.gz) = 2d4f59402a03527dcaf56bfef687c46f6f16ca79d521493bf0c49b8936239841 +SIZE 
(yuzu-emu-yuzu-s20220325-da46d924e_GH0.tar.gz) = 4659921 SHA256 (yuzu-emu-mbedtls-v2.16.9-115-g8c88150ca_GH0.tar.gz) = 8cd6d075b4da0ad5fb995eb37390e2e6088be8d41ab1cdfc7e7e4256bd991450 SIZE (yuzu-emu-mbedtls-v2.16.9-115-g8c88150ca_GH0.tar.gz) = 2679189 SHA256 (KhronosGroup-SPIRV-Headers-1.5.4.raytracing.fixed-32-ga3fdfe8_GH0.tar.gz) = bd629d6296dd374eb2aeff923c75895ba0f3ce6448dad89763930e65b954e0cb diff --git a/finance/gnucash-docs/Makefile b/finance/gnucash-docs/Makefile index 795b731b7ef..ad611d4d323 100644 --- a/finance/gnucash-docs/Makefile +++ b/finance/gnucash-docs/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnucash-docs DISTVERSION= 4.9 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= finance gnome MASTER_SITES= SF/gnucash/gnucash%20%28stable%29/${PORTVERSION} diff --git a/finance/gnucash/Makefile b/finance/gnucash/Makefile index 6eb78eb35f2..6491760647f 100644 --- a/finance/gnucash/Makefile +++ b/finance/gnucash/Makefile @@ -1,6 +1,6 @@ PORTNAME= gnucash PORTVERSION= 4.9 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= finance gnome MASTER_SITES= SF/${PORTNAME}/${PORTNAME}%20%28stable%29/${PORTVERSION} diff --git a/finance/grisbi/Makefile b/finance/grisbi/Makefile index cb73d6b2a93..ea95bc6d36d 100644 --- a/finance/grisbi/Makefile +++ b/finance/grisbi/Makefile @@ -2,6 +2,7 @@ PORTNAME= grisbi PORTVERSION= 2.0.5 +PORTREVISION= 1 CATEGORIES= finance MASTER_SITES= SF/${PORTNAME}/${PORTNAME}%20stable/${PORTVERSION:R}.x/${PORTVERSION} diff --git a/finance/libofx/Makefile b/finance/libofx/Makefile index 4ec3dec3c9f..56f71321494 100644 --- a/finance/libofx/Makefile +++ b/finance/libofx/Makefile @@ -2,6 +2,7 @@ PORTNAME= libofx PORTVERSION= 0.10.3 +PORTREVISION= 1 CATEGORIES= finance MASTER_SITES= https://github.com/${PORTNAME}/${PORTNAME}/releases/download/${PORTVERSION}/ \ SF/${PORTNAME}/${PORTNAME} diff --git a/finance/nextcloud-cospend/Makefile b/finance/nextcloud-cospend/Makefile index 6f13d140b47..01dd064e52b 100644 --- a/finance/nextcloud-cospend/Makefile +++ 
b/finance/nextcloud-cospend/Makefile @@ -1,7 +1,7 @@ # Created by: Michael Reifenberger PORTNAME= cospend -PORTVERSION= 1.4.5 +PORTVERSION= 1.4.6 DISTVERSIONPREFIX= v CATEGORIES= finance www editors MASTER_SITES= https://github.com/eneiluj/${PORTNAME}-nc/releases/download/${DISTVERSIONPREFIX}${PORTVERSION}/ diff --git a/finance/nextcloud-cospend/distinfo b/finance/nextcloud-cospend/distinfo index 9cf1bbacf7a..282bc07fc8e 100644 --- a/finance/nextcloud-cospend/distinfo +++ b/finance/nextcloud-cospend/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647021353 -SHA256 (nextcloud/cospend-1.4.5.tar.gz) = ba239cc5fdf78941c651b080bc481210653828662f8b50a9b2580277a221081a -SIZE (nextcloud/cospend-1.4.5.tar.gz) = 6004614 +TIMESTAMP = 1648454997 +SHA256 (nextcloud/cospend-1.4.6.tar.gz) = 2a381de66d9f648131bd9d3d92ae1a54cdf60b32fa5363ccff0bc1ebe0e7fdef +SIZE (nextcloud/cospend-1.4.6.tar.gz) = 5938121 diff --git a/finance/odoo/Makefile b/finance/odoo/Makefile index b2e7d70f2f2..bf903da8d6a 100644 --- a/finance/odoo/Makefile +++ b/finance/odoo/Makefile @@ -1,5 +1,6 @@ PORTNAME= odoo PORTVERSION= 14.0 +PORTREVISION= 1 CATEGORIES= finance www PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/finance/py-bitcoin/Makefile b/finance/py-bitcoin/Makefile index 380fbd76b3b..ece69fb9a93 100644 --- a/finance/py-bitcoin/Makefile +++ b/finance/py-bitcoin/Makefile @@ -13,7 +13,7 @@ LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE USES= python:3.6+ -USE_PYTHON= autoplist distutils +USE_PYTHON= distutils concurrent autoplist NO_ARCH= yes diff --git a/finance/py-python-obelisk/files/patch-2to3 b/finance/py-python-obelisk/files/patch-2to3 new file mode 100644 index 00000000000..10f5c0011e6 --- /dev/null +++ b/finance/py-python-obelisk/files/patch-2to3 @@ -0,0 +1,297 @@ +--- obelisk/bitcoin.py.orig 2014-08-10 17:00:16 UTC ++++ obelisk/bitcoin.py +@@ -21,10 +21,10 @@ import hashlib + import base64 + import ecdsa + import re +-from util import print_error +-import config +-import models +-import numbertheory 
++from .util import print_error ++from . import config ++from . import models ++from . import numbertheory + import os + + +@@ -162,7 +162,7 @@ __b58base = len(__b58chars) + def b58encode(v): + """ encode v, which is a string of bytes, to base58.""" + +- long_value = 0L ++ long_value = 0 + for (i, c) in enumerate(v[::-1]): + long_value += (256**i) * ord(c) + +@@ -187,7 +187,7 @@ def b58encode(v): + + def b58decode(v, length): + """ decode v into a string of len bytes.""" +- long_value = 0L ++ long_value = 0 + for (i, c) in enumerate(v[::-1]): + long_value += __b58chars.find(c) * (__b58base**i) + +@@ -303,12 +303,12 @@ def is_valid(addr): + ########### end pywallet functions ####################### + + # secp256k1, http://www.oid-info.com/get/1.3.132.0.10 +-_p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2FL +-_r = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141L +-_b = 0x0000000000000000000000000000000000000000000000000000000000000007L +-_a = 0x0000000000000000000000000000000000000000000000000000000000000000L +-_Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798L +-_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8L ++_p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F ++_r = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 ++_b = 0x0000000000000000000000000000000000000000000000000000000000000007 ++_a = 0x0000000000000000000000000000000000000000000000000000000000000000 ++_Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 ++_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8 + curve_secp256k1 = ecdsa.ellipticcurve.CurveFp(_p, _a, _b) + generator_secp256k1 = ecdsa.ellipticcurve.Point(curve_secp256k1, _Gx, _Gy, _r) + oid_secp256k1 = (1, 3, 132, 0, 10) +@@ -698,7 +698,7 @@ class Transaction: + self.deserialize() + self.inputs = self.d['inputs'] + self.outputs = self.d['outputs'] +- self.outputs = map(lambda 
x: (x['address'], x['value']), self.outputs) ++ self.outputs = [(x['address'], x['value']) for x in self.outputs] + self.input_info = None + self.is_complete = True + +@@ -835,7 +835,7 @@ class Transaction: + return Hash(self.raw.decode('hex'))[::-1].encode('hex') + + def sign(self, private_keys): +- import deserialize ++ from . import deserialize + + for i in range(len(self.inputs)): + txin = self.inputs[i] +@@ -850,7 +850,7 @@ class Transaction: + + # build list of public/private keys + keypairs = {} +- for sec in private_keys.values(): ++ for sec in list(private_keys.values()): + compressed = is_compressed(sec) + pkey = regenerate_key(sec) + pubkey = GetPubKey(pkey.pubkey, compressed) +@@ -877,7 +877,7 @@ class Transaction: + else: + # check if we have a key + # corresponding to the redeem script +- if pubkey in keypairs.keys(): ++ if pubkey in list(keypairs.keys()): + # add signature + sec = keypairs[pubkey] + compressed = is_compressed(sec) +@@ -931,7 +931,7 @@ class Transaction: + self.raw = self.serialize(self.inputs, self.outputs) + + def deserialize(self): +- import deserialize ++ from . 
import deserialize + vds = deserialize.BCDataStream() + vds.write(self.raw.decode('hex')) + self.d = deserialize.parse_Transaction(vds) +--- obelisk/bittree.py.orig 2014-08-10 17:00:16 UTC ++++ obelisk/bittree.py +@@ -120,22 +120,22 @@ if __name__ == "__main__": + tree.add("010101", 666) + tree.add("010101", 888) + tree.add("101", 110) +- print tree ++ print(tree) + tree.add("10111", 116) +- print tree.lookup("101") +- print tree.lookup("10111") +- print tree.lookup("010") +- print tree.lookup("010101") +- print tree ++ print(tree.lookup("101")) ++ print(tree.lookup("10111")) ++ print(tree.lookup("010")) ++ print(tree.lookup("010101")) ++ print(tree) + tree.delete("10111", 116) +- print tree +- print tree.lookup("101") +- print tree.lookup("0") +- print tree.lookup("1") +- print "------------" ++ print(tree) ++ print(tree.lookup("101")) ++ print(tree.lookup("0")) ++ print(tree.lookup("1")) ++ print("------------") + tree = BitTree() + tree.add("10", 777) + tree.add("101", 333) + tree.add("1011", 222) + tree.add("00", 666) +- print tree.lookup("1011") ++ print(tree.lookup("1011")) +--- obelisk/client.py.orig 2014-08-10 17:00:16 UTC ++++ obelisk/client.py +@@ -1,10 +1,10 @@ + import struct + +-from zmqbase import ClientBase ++from .zmqbase import ClientBase + +-import bitcoin +-import serialize +-import error_code ++from . import bitcoin ++from . import serialize ++from . 
import error_code + + + def unpack_error(data): +@@ -234,9 +234,9 @@ class ObeliskOfLightClient(ClientBase): + self.subscribed += 1 + error = unpack_error(data) + if error: +- print "Error subscribing" ++ print("Error subscribing") + if not self.subscribed % 1000: +- print "Subscribed ok", self.subscribed ++ print("Subscribed ok", self.subscribed) + return (error, True) + + def _on_update(self, data): +@@ -257,7 +257,7 @@ class ObeliskOfLightClient(ClientBase): + self.subscribed += 1 + error = unpack_error(data) + if error: +- print "Error subscribing" ++ print("Error subscribing") + if not self.subscribed % 1000: +- print "Renew ok", self.subscribed ++ print("Renew ok", self.subscribed) + return (error, True) +--- obelisk/serialize.py.orig 2014-08-10 17:00:16 UTC ++++ obelisk/serialize.py +@@ -5,7 +5,7 @@ import io + import hashlib + from binascii import hexlify, unhexlify + from hashlib import sha256 +-import models ++from . import models + + + # Py3 compatibility +@@ -539,7 +539,7 @@ def deser_data(command, data_bytes): + elif command == "blockchain.fetch_block_transaction_hashes": + hash_list = [] + +- print hexlify(data_bytes[4:]) ++ print(hexlify(data_bytes[4:])) + + assert((len(data_bytes)-4) % 32 == 0) + +--- obelisk/util.py.orig 2014-08-10 17:00:16 UTC ++++ obelisk/util.py +@@ -106,7 +106,7 @@ def format_satoshis(x, is_diff=False, + from decimal import Decimal + s = Decimal(x) + sign, digits, exp = s.as_tuple() +- digits = map(str, digits) ++ digits = list(map(str, digits)) + while len(digits) < decimal_point + 1: + digits.insert(0, '0') + digits.insert(-decimal_point, '.') +@@ -201,6 +201,6 @@ def parse_url(url): + identity, signature = uv.split(':') + url = url.replace('&%s=%s' % (k, v), '') + else: +- print k, v ++ print(k, v) + + return address, amount, label, message, signature, identity, url +--- obelisk/zmqbase.py.orig 2014-08-10 17:00:16 UTC ++++ obelisk/zmqbase.py +@@ -7,7 +7,7 @@ import logging + # from zmqproto import ZmqSocket + #except 
ImportError: + # from zmq_fallback import ZmqSocket +-from zmq_fallback import ZmqSocket ++from .zmq_fallback import ZmqSocket + + + SNDMORE = 1 +@@ -50,10 +50,10 @@ class ClientBase(object): + self.trigger_callbacks(id, *res) + + def on_raw_block(self, height, hash, header, tx_num, tx_hashes): +- print "block", height, len(tx_hashes) ++ print("block", height, len(tx_hashes)) + + def on_raw_transaction(self, tx_data): +- print "tx", tx_data.encode('hex') ++ print("tx", tx_data.encode('hex')) + + # Base Api + def send_command(self, command, data='', cb=None): +@@ -71,7 +71,7 @@ class ClientBase(object): + return tx_id + + def unsubscribe(self, cb): +- for sub_id in self._subscriptions.keys(): ++ for sub_id in list(self._subscriptions.keys()): + if self._subscriptions[sub_id] == cb: + self._subscriptions.pop(sub_id) + +@@ -88,8 +88,8 @@ class ClientBase(object): + self._messages.append(frame) + if not more: + if not len(self._messages) == 3: +- print "Sequence with wrong messages", len(self._messages) +- print [m.encode("hex") for m in self._messages] ++ print("Sequence with wrong messages", len(self._messages)) ++ print([m.encode("hex") for m in self._messages]) + self._messages = [] + return + command, id, data = self._messages +@@ -102,9 +102,9 @@ class ClientBase(object): + if not more: + nblocks = struct.unpack('Q', self._block_messages[3])[0] + if not len(self._block_messages) == 4 + nblocks: +- print "Sequence with wrong messages",\ ++ print("Sequence with wrong messages",\ + len(self._block_messages),\ +- 4 + nblocks ++ 4 + nblocks) + self._block_messages = [] + return + height, hash, header, tx_num = self._block_messages[:4] +@@ -112,7 +112,7 @@ class ClientBase(object): + if len(tx_num) >= 4: + tx_num = struct.unpack_from('I', tx_num, 0)[0] + else: +- print "wrong tx_num length", len(tx_num), tx_num ++ print("wrong tx_num length", len(tx_num), tx_num) + tx_num = struct.unpack('I', tx_num.zfill(4))[0] + self._block_messages = [] + height = struct.unpack('I', 
height)[0] +@@ -122,7 +122,7 @@ class ClientBase(object): + self._tx_messages.append(frame) + if not more: + if not len(self._tx_messages) == 1: +- print "Sequence with wrong messages", len(self._tx_messages) ++ print("Sequence with wrong messages", len(self._tx_messages)) + self._tx_messages = [] + return + tx_data = self._tx_messages[0] +@@ -157,7 +157,7 @@ class ClientBase(object): + + # unpack + rows = [] +- for idx in xrange(nrows): ++ for idx in range(nrows): + offset = start+(idx*row_size) + row = struct.unpack_from(row_fmt, data, offset) + rows.append(row) diff --git a/finance/py-yfinance/Makefile b/finance/py-yfinance/Makefile index 00078550c6f..c08792b9b6f 100644 --- a/finance/py-yfinance/Makefile +++ b/finance/py-yfinance/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= yfinance -PORTVERSION= 0.1.69 +PORTVERSION= 0.1.70 CATEGORIES= finance python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -15,7 +15,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE.txt RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}lxml>=4.5.1:devel/py-lxml@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}multitasking>=0.0.7:devel/py-multitasking@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}numpy>=1.15,1:math/py-numpy@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pandas>=0.24,1:math/py-pandas@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pandas>=0.24.0,1:math/py-pandas@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}requests>=2.20:www/py-requests@${PY_FLAVOR} USES= python:3.7+ diff --git a/finance/py-yfinance/distinfo b/finance/py-yfinance/distinfo index d81cbf19cbe..28543412d39 100644 --- a/finance/py-yfinance/distinfo +++ b/finance/py-yfinance/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971106 -SHA256 (yfinance-0.1.69.tar.gz) = f8448c473188b0de5f16fe11125842545a6649a4caf43bfa24a931d57eac28e5 -SIZE (yfinance-0.1.69.tar.gz) = 24335 +TIMESTAMP = 1647264590 +SHA256 (yfinance-0.1.70.tar.gz) = a42190dd3b3fce1b00aec273db36392b8f100cc8c73dc7881bb558117cbf7c69 +SIZE (yfinance-0.1.70.tar.gz) = 24356 diff --git 
a/finance/quickfix/Makefile b/finance/quickfix/Makefile index 69a2d009959..47d481e2b22 100644 --- a/finance/quickfix/Makefile +++ b/finance/quickfix/Makefile @@ -2,6 +2,7 @@ PORTNAME= quickfix PORTVERSION= 1.15.1 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= finance devel diff --git a/french/verbiste/Makefile b/french/verbiste/Makefile index a300883c886..72bb310a5bc 100644 --- a/french/verbiste/Makefile +++ b/french/verbiste/Makefile @@ -2,6 +2,7 @@ PORTNAME= verbiste PORTVERSION= 0.1.47 +PORTREVISION= 1 CATEGORIES= french education textproc MASTER_SITES= http://sarrazip.com/dev/ diff --git a/ftp/R-cran-RCurl/Makefile b/ftp/R-cran-RCurl/Makefile index 6cbe43bf54b..99bac273a2b 100644 --- a/ftp/R-cran-RCurl/Makefile +++ b/ftp/R-cran-RCurl/Makefile @@ -2,6 +2,7 @@ PORTNAME= RCurl DISTVERSION= 1.98-1.6 +PORTREVISION= 1 CATEGORIES= ftp DISTNAME= ${PORTNAME}_${DISTVERSION} diff --git a/ftp/gstreamer1-plugins-curl/Makefile b/ftp/gstreamer1-plugins-curl/Makefile index a8de27b5172..4613fa2b807 100644 --- a/ftp/gstreamer1-plugins-curl/Makefile +++ b/ftp/gstreamer1-plugins-curl/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= ftp COMMENT= GStreamer curl based output plugin diff --git a/ftp/py-pycurl/Makefile b/ftp/py-pycurl/Makefile index 6b79328a4fb..4d35194eb09 100644 --- a/ftp/py-pycurl/Makefile +++ b/ftp/py-pycurl/Makefile @@ -1,7 +1,7 @@ # Created by: Hye-Shik Chang PORTNAME= pycurl -PORTVERSION= 7.44.1 +PORTVERSION= 7.45.1 CATEGORIES= ftp python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/ftp/py-pycurl/distinfo b/ftp/py-pycurl/distinfo index 7deff1c0d7b..74985a2d697 100644 --- a/ftp/py-pycurl/distinfo +++ b/ftp/py-pycurl/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632227546 -SHA256 (pycurl-7.44.1.tar.gz) = 5bcef4d988b74b99653602101e17d8401338d596b9234d263c728a0c3df003e8 -SIZE (pycurl-7.44.1.tar.gz) = 227562 +TIMESTAMP = 1647264592 +SHA256 (pycurl-7.45.1.tar.gz) = 
a863ad18ff478f5545924057887cdae422e1b2746e41674615f687498ea5b88a +SIZE (pycurl-7.45.1.tar.gz) = 233879 diff --git a/games/0ad/Makefile b/games/0ad/Makefile index 721b123f2e9..3ee1d82b006 100644 --- a/games/0ad/Makefile +++ b/games/0ad/Makefile @@ -1,6 +1,6 @@ PORTNAME= 0ad PORTVERSION= 0.0.25b -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= games MASTER_SITES= http://releases.wildfiregames.com/ \ SF/zero-ad/releases @@ -67,6 +67,8 @@ post-patch: -e '1441s/"execinfo",/& "ssp_nonshared",/' \ ${WRKSRC}/build/premake/premake5.lua .endif + @${ECHO_CMD} "patch < ../setuptools.diff" >> ${WRKSRC}/libraries/source/spidermonkey/patch.sh + @${CP} ${FILESDIR}/setuptools.diff ${WRKSRC}/libraries/source/spidermonkey/ pre-build: (cd ${WRKSRC}/build/workspaces && ${SETENV} ${MAKE_ENV} ./update-workspaces.sh --bindir=${PREFIX}/bin --datadir=${DATADIR} --libdir=${PREFIX}/lib/${PORTNAME}) diff --git a/games/0ad/files/setuptools.diff b/games/0ad/files/setuptools.diff new file mode 100644 index 00000000000..9cc0efb613c --- /dev/null +++ b/games/0ad/files/setuptools.diff @@ -0,0 +1,19 @@ +--- build/moz.configure/init.configure.orig 2020-11-28 16:33:01.000000000 +0000 ++++ build/moz.configure/init.configure 2022-03-22 20:13:07.253730000 +0000 +@@ -245,6 +245,7 @@ + @imports('sys') + @imports('subprocess') + @imports('distutils.sysconfig') ++@imports(_from='distutils', _import='sysconfig') + @imports(_from='mozbuild.configure.util', _import='LineIO') + @imports(_from='mozbuild.virtualenv', _import='VirtualenvManager') + @imports(_from='mozbuild.virtualenv', _import='verify_python_version') +@@ -375,7 +376,7 @@ + sys.exit(subprocess.call([python] + sys.argv)) + + # We are now in the virtualenv +- if not distutils.sysconfig.get_python_lib(): ++ if not sysconfig.get_python_lib(): + die('Could not determine python site packages directory') + + str_version = '.'.join(str(v) for v in version) diff --git a/games/Makefile b/games/Makefile index cfa10b590d9..283d72afdfd 100644 --- a/games/Makefile 
+++ b/games/Makefile @@ -1157,6 +1157,7 @@ SUBDIR += xzip SUBDIR += yadex SUBDIR += yahtzee + SUBDIR += yquake2 SUBDIR += zangband SUBDIR += zatacka SUBDIR += zaz diff --git a/games/armagetronad/Makefile b/games/armagetronad/Makefile index fba9fdd467f..41250221c25 100644 --- a/games/armagetronad/Makefile +++ b/games/armagetronad/Makefile @@ -1,7 +1,7 @@ PORTNAME= armagetronad DISTVERSION= 0.2.8.3.5 DISTVERSIONSUFFIX= .src -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= games MASTER_SITES= SF/${PORTNAME}/stable/${DISTVERSION}/ diff --git a/games/el/Makefile b/games/el/Makefile index e416a590f21..470003a39dc 100644 --- a/games/el/Makefile +++ b/games/el/Makefile @@ -2,6 +2,7 @@ PORTNAME= el DISTVERSION= 1.9.5.9-1 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= games diff --git a/games/exult/Makefile b/games/exult/Makefile index 259573236f3..4d1807c7667 100644 --- a/games/exult/Makefile +++ b/games/exult/Makefile @@ -3,6 +3,7 @@ PORTNAME= exult DISTVERSIONPREFIX= v DISTVERSION= 1.7.0.20211128 +PORTREVISION= 1 CATEGORIES= games MAINTAINER= ports@FreeBSD.org diff --git a/games/freedoko/Makefile b/games/freedoko/Makefile index 0c3a129b885..9e34a2fed3b 100644 --- a/games/freedoko/Makefile +++ b/games/freedoko/Makefile @@ -2,7 +2,7 @@ PORTNAME= freedoko PORTVERSION= 0.7.19 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= games MASTER_SITES= SF/free-doko/source DISTNAME= FreeDoko_${PORTVERSION}.src diff --git a/games/gtkatlantic/Makefile b/games/gtkatlantic/Makefile index d8ddd5d8075..e9e23987820 100644 --- a/games/gtkatlantic/Makefile +++ b/games/gtkatlantic/Makefile @@ -2,6 +2,7 @@ PORTNAME= gtkatlantic DISTVERSION= 0.6.3 +PORTREVISION= 1 CATEGORIES= games MASTER_SITES= http://download.tuxfamily.org/gtkatlantic/downloads/v${PORTVERSION:R}/ \ SF/nemysisfreebsdp/${CATEGORIES}/:icons diff --git a/games/gtkevemon/Makefile b/games/gtkevemon/Makefile index bd1d05ddad3..a080309b584 100644 --- a/games/gtkevemon/Makefile +++ b/games/gtkevemon/Makefile @@ -1,6 +1,6 @@ PORTNAME= gtkevemon 
PORTVERSION= 1.10.2016.02.17 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= games MAINTAINER= ports@FreeBSD.org diff --git a/games/gtkradiant/Makefile b/games/gtkradiant/Makefile index 38d9bec6e0a..1791a069877 100644 --- a/games/gtkradiant/Makefile +++ b/games/gtkradiant/Makefile @@ -2,7 +2,7 @@ PORTNAME= gtkradiant PORTVERSION= 1.5.0 -PORTREVISION= 16 +PORTREVISION= 17 CATEGORIES= games cad MASTER_SITES= IDSOFTWARE/source/:id \ http://www.bsd-geek.de/FreeBSD/distfiles/:fb diff --git a/games/libmaitretarot/Makefile b/games/libmaitretarot/Makefile index 5c1842bce4d..685c56ffe13 100644 --- a/games/libmaitretarot/Makefile +++ b/games/libmaitretarot/Makefile @@ -2,7 +2,7 @@ PORTNAME= libmaitretarot PORTVERSION= 0.1.98 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= games MASTER_SITES= SAVANNAH/maitretarot diff --git a/games/libmt_client/Makefile b/games/libmt_client/Makefile index 620990c2854..1cb24273ba7 100644 --- a/games/libmt_client/Makefile +++ b/games/libmt_client/Makefile @@ -2,7 +2,7 @@ PORTNAME= libmt_client PORTVERSION= 0.1.98 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= games MASTER_SITES= SAVANNAH/maitretarot diff --git a/games/lincity-ng/Makefile b/games/lincity-ng/Makefile index 1abacb1a21e..af4d4d246f2 100644 --- a/games/lincity-ng/Makefile +++ b/games/lincity-ng/Makefile @@ -2,7 +2,7 @@ PORTNAME= lincity-ng PORTVERSION= 2.0 -PORTREVISION= 17 +PORTREVISION= 18 CATEGORIES= games MASTER_SITES= BERLIOS diff --git a/games/lordsawar/Makefile b/games/lordsawar/Makefile index d94c25b8207..28bf357dbff 100644 --- a/games/lordsawar/Makefile +++ b/games/lordsawar/Makefile @@ -1,5 +1,6 @@ PORTNAME= lordsawar PORTVERSION= 0.3.2 +PORTREVISION= 1 CATEGORIES= games MASTER_SITES= SAVANNAH diff --git a/games/maitretarot/Makefile b/games/maitretarot/Makefile index 5862a50997e..d5594c5a1ba 100644 --- a/games/maitretarot/Makefile +++ b/games/maitretarot/Makefile @@ -2,7 +2,7 @@ PORTNAME= maitretarot PORTVERSION= 0.1.98 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= games 
MASTER_SITES= SAVANNAH diff --git a/games/manaplus/Makefile b/games/manaplus/Makefile index 9e8b32a7354..7fa342578c2 100644 --- a/games/manaplus/Makefile +++ b/games/manaplus/Makefile @@ -2,7 +2,7 @@ PORTNAME= manaplus DISTVERSION= 2.1.3.17 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= games MASTER_SITES= http://download.evolonline.org/manaplus/download/${PORTVERSION}/ diff --git a/games/mt_dolphin_ia/Makefile b/games/mt_dolphin_ia/Makefile index e2b9b9b82ad..a45b4b9d20b 100644 --- a/games/mt_dolphin_ia/Makefile +++ b/games/mt_dolphin_ia/Makefile @@ -2,7 +2,7 @@ PORTNAME= mt_dolphin_ia PORTVERSION= 0.1.98 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= games MASTER_SITES= SAVANNAH/maitretarot diff --git a/games/mt_gtk_client/Makefile b/games/mt_gtk_client/Makefile index 73a953a09ac..7a6c7fc42d5 100644 --- a/games/mt_gtk_client/Makefile +++ b/games/mt_gtk_client/Makefile @@ -2,7 +2,7 @@ PORTNAME= mt_gtk_client PORTVERSION= 0.1.98 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= games MASTER_SITES= SAVANNAH/maitretarot diff --git a/games/naev/Makefile b/games/naev/Makefile index d32309c4997..9eb31d6816d 100644 --- a/games/naev/Makefile +++ b/games/naev/Makefile @@ -2,7 +2,7 @@ PORTNAME= naev PORTVERSION= 0.7.0 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= games MASTER_SITES= SF/${PORTNAME}/${PORTNAME}-${PORTVERSION}/ diff --git a/games/netradiant/Makefile b/games/netradiant/Makefile index 45679440af1..dccc199dcc7 100644 --- a/games/netradiant/Makefile +++ b/games/netradiant/Makefile @@ -2,7 +2,7 @@ PORTNAME= netradiant PORTVERSION= 20150621 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= games cad MASTER_SITES= http://ingar.intranifty.net/gtkradiant/files/ LOCAL/danfe \ http://ingar.intranifty.net/gtkradiant/files/gamepacks/:gp diff --git a/games/openlierox/Makefile b/games/openlierox/Makefile index 54853cc6c02..ac19b4e01cf 100644 --- a/games/openlierox/Makefile +++ b/games/openlierox/Makefile @@ -1,6 +1,6 @@ PORTNAME= openlierox DISTVERSION= 0.58_rc5 -PORTREVISION= 1 
+PORTREVISION= 2 PORTEPOCH= 1 CATEGORIES= games MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/OpenLieroX%20${DISTVERSION:C/_/%20/} diff --git a/games/py-pychess/Makefile b/games/py-pychess/Makefile index c406c05883c..4be70060a55 100644 --- a/games/py-pychess/Makefile +++ b/games/py-pychess/Makefile @@ -2,7 +2,7 @@ PORTNAME= pychess PORTVERSION= 1.0.2 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= games python MASTER_SITES= https://github.com/pychess/pychess/releases/download/${PORTVERSION}/ diff --git a/games/tuxmath/Makefile b/games/tuxmath/Makefile index 495e4e1277c..91c107d4d04 100644 --- a/games/tuxmath/Makefile +++ b/games/tuxmath/Makefile @@ -2,7 +2,7 @@ PORTNAME= tuxmath PORTVERSION= 2.0.3 -PORTREVISION= 8 +PORTREVISION= 9 PORTEPOCH= 1 CATEGORIES= games education MASTER_SITES= DEBIAN_POOL \ diff --git a/games/tuxtype/Makefile b/games/tuxtype/Makefile index 20d960551e1..d83a95d352b 100644 --- a/games/tuxtype/Makefile +++ b/games/tuxtype/Makefile @@ -2,7 +2,7 @@ PORTNAME= tuxtype PORTVERSION= 1.8.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= games education MASTER_SITES= DEBIAN_POOL DISTNAME= ${PORTNAME}_${PORTVERSION}.orig diff --git a/games/xmoto/Makefile b/games/xmoto/Makefile index f9178f5e9a4..257641f548c 100644 --- a/games/xmoto/Makefile +++ b/games/xmoto/Makefile @@ -1,5 +1,6 @@ PORTNAME= xmoto PORTVERSION= 0.6.1 +PORTREVISION= 1 CATEGORIES= games MAINTAINER= amdmi3@FreeBSD.org diff --git a/games/yquake2/Makefile b/games/yquake2/Makefile new file mode 100644 index 00000000000..47a627afb69 --- /dev/null +++ b/games/yquake2/Makefile @@ -0,0 +1,45 @@ +# Created by: Vasily Postnicov + +PORTNAME= yquake2 +PORTVERSION= 8.01 +CATEGORIES= games +MASTER_SITES= https://deponie.yamagi.org/quake2/ +DISTNAME= quake2-${PORTVERSION} + +MAINTAINER= shamaz.mazum@gmail.com +COMMENT= Improved version of Icculus Quake II + +LICENSE= GPLv2 +LICENSE_FILE= ${WRKSRC}/LICENSE + +# Quake II loads optional libraries via dlopen(3) +BUILD_DEPENDS= 
${LOCALBASE}/include/curl/curl.h:ftp/curl +RUN_DEPENDS= ${LOCALBASE}/lib/libcurl.so:ftp/curl + +USES= gmake openal gl sdl tar:xz +USE_GL= gl +USE_SDL= sdl2 +MAKE_ARGS= VERBOSE=1 WITH_SYSTEMWIDE=yes \ + WITH_SYSTEMDATADIR="${Q2DIR}" \ + WITH_SYSTEMLIBDIR="${PREFIX}/lib/${PORTNAME}" + +OPTIONS_DEFINE= DOCS + +do-install: + ${INSTALL_PROGRAM} ${WRKSRC}/release/q2ded \ + ${STAGEDIR}${PREFIX}/bin/yq2ded + ${INSTALL_PROGRAM} ${WRKSRC}/release/quake2 \ + ${STAGEDIR}${PREFIX}/bin/${PORTNAME} + @${MKDIR} ${STAGEDIR}${PREFIX}/lib/${PORTNAME}/baseq2 + ${INSTALL_LIB} ${WRKSRC}/release/baseq2/game.so \ + ${STAGEDIR}${PREFIX}/lib/${PORTNAME}/baseq2 + ${INSTALL_LIB} ${WRKSRC}/release/ref_*.so \ + ${STAGEDIR}${PREFIX}/lib/${PORTNAME} + +post-install-DOCS-on: + @${MKDIR} ${STAGEDIR}${DOCSDIR} + ${INSTALL_DATA} ${WRKSRC}/CHANGELOG ${WRKSRC}/doc/*.md \ + ${STAGEDIR}${DOCSDIR} + +.include "${.CURDIR}/../quake2-data/Makefile.include" +.include diff --git a/games/yquake2/distinfo b/games/yquake2/distinfo new file mode 100644 index 00000000000..ee74449e761 --- /dev/null +++ b/games/yquake2/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1642841575 +SHA256 (quake2-8.01.tar.xz) = 132889a6976dd7c75bf94a4ca49c76ff09cf843d66541872320d7a02a25a622c +SIZE (quake2-8.01.tar.xz) = 2109408 diff --git a/games/yquake2/files/patch-Makefile b/games/yquake2/files/patch-Makefile new file mode 100644 index 00000000000..0d09f6f0be6 --- /dev/null +++ b/games/yquake2/files/patch-Makefile @@ -0,0 +1,28 @@ +--- Makefile.orig 2022-01-22 08:51:28 UTC ++++ Makefile +@@ -251,10 +251,13 @@ endif + # Systemwide installation. 
+ ifeq ($(WITH_SYSTEMWIDE),yes) + override CFLAGS += -DSYSTEMWIDE +-ifneq ($(WITH_SYSTEMDIR),"") +-override CFLAGS += -DSYSTEMDIR=\"$(WITH_SYSTEMDIR)\" ++ifneq ($(WITH_SYSTEMDATADIR),"") ++override CFLAGS += -DSYSTEMDATADIR=\"$(WITH_SYSTEMDATADIR)\" + endif ++ifneq ($(WITH_SYSTEMLIBDIR),"") ++override CFLAGS += -DSYSTEMLIBDIR=\"$(WITH_SYSTEMLIBDIR)\" + endif ++endif + + # ---------- + +@@ -405,7 +408,8 @@ config: + @echo "WITH_OPENAL = $(WITH_OPENAL)" + @echo "WITH_RPATH = $(WITH_RPATH)" + @echo "WITH_SYSTEMWIDE = $(WITH_SYSTEMWIDE)" +- @echo "WITH_SYSTEMDIR = $(WITH_SYSTEMDIR)" ++ @echo "WITH_SYSTEMDATADIR = $(WITH_SYSTEMDATADIR)" ++ @echo "WITH_SYSTEMLIBDIR = $(WITH_SYSTEMLIBDIR)" + @echo "============================" + @echo "" + diff --git a/games/yquake2/files/patch-src_client_vid_vid.c b/games/yquake2/files/patch-src_client_vid_vid.c new file mode 100644 index 00000000000..3238c79518d --- /dev/null +++ b/games/yquake2/files/patch-src_client_vid_vid.c @@ -0,0 +1,17 @@ +--- src/client/vid/vid.c.orig 2022-01-22 08:51:28 UTC ++++ src/client/vid/vid.c +@@ -318,7 +318,13 @@ const char* lib_ext = "so"; + static void + VID_GetRendererLibPath(const char *renderer, char *path, size_t len) + { +- snprintf(path, len, "%sref_%s.%s", Sys_GetBinaryDir(), renderer, lib_ext); ++ char *next_path = NULL; ++ ++ while ((next_path = FS_GetNextRawPath (next_path)) != NULL) { ++ snprintf(path, len, "%s/ref_%s.%s", next_path, renderer, lib_ext); ++ if (Sys_IsFile(path)) ++ break; ++ } + } + + /* diff --git a/games/yquake2/files/patch-src_common_filesystem.c b/games/yquake2/files/patch-src_common_filesystem.c new file mode 100644 index 00000000000..82703839430 --- /dev/null +++ b/games/yquake2/files/patch-src_common_filesystem.c @@ -0,0 +1,12 @@ +--- src/common/filesystem.c.orig 2022-01-22 08:51:28 UTC ++++ src/common/filesystem.c +@@ -2029,7 +2029,8 @@ void FS_BuildRawPath(void) { + // binary compiled with SYSTEMWIDE (installed from + // packages), but no systemwide game data. 
+ #ifdef SYSTEMWIDE +- FS_AddDirToRawPath(SYSTEMDIR, false, false); ++ FS_AddDirToRawPath(SYSTEMDATADIR, false, false); ++ FS_AddDirToRawPath(SYSTEMLIBDIR, false, false); + #endif + + // The CD must be the last directory of the path, diff --git a/games/yquake2/pkg-descr b/games/yquake2/pkg-descr new file mode 100644 index 00000000000..5929463f39f --- /dev/null +++ b/games/yquake2/pkg-descr @@ -0,0 +1,9 @@ +Yamagi Quake II is an enhanced client for id Software's Quake II with +focus on offline and cooperative gameplay. Both the gameplay and the +graphics are unchanged, but many bugs in the last official release were +fixed and some nice to have features like widescreen support and a modern +OpenGL 3.2 renderer were added. Unlike most other Quake II source ports +Yamagi Quake II is fully 64-bit clean. It works perfectly on modern +processors and operating systems. + +WWW: https://www.yamagi.org/quake2/ diff --git a/games/yquake2/pkg-plist b/games/yquake2/pkg-plist new file mode 100644 index 00000000000..fce8f627ae2 --- /dev/null +++ b/games/yquake2/pkg-plist @@ -0,0 +1,16 @@ +bin/yq2ded +bin/yquake2 +lib/yquake2/baseq2/game.so +lib/yquake2/ref_gl1.so +lib/yquake2/ref_gl3.so +lib/yquake2/ref_soft.so +%%PORTDOCS%%%%DOCSDIR%%/010_index.md +%%PORTDOCS%%%%DOCSDIR%%/020_installation.md +%%PORTDOCS%%%%DOCSDIR%%/030_configuration.md +%%PORTDOCS%%%%DOCSDIR%%/040_cvarlist.md +%%PORTDOCS%%%%DOCSDIR%%/050_commands.md +%%PORTDOCS%%%%DOCSDIR%%/060_multiplayer.md +%%PORTDOCS%%%%DOCSDIR%%/070_packaging.md +%%PORTDOCS%%%%DOCSDIR%%/080_contributing.md +%%PORTDOCS%%%%DOCSDIR%%/090_filelists.md +%%PORTDOCS%%%%DOCSDIR%%/CHANGELOG diff --git a/graphics/GraphicsMagick/Makefile b/graphics/GraphicsMagick/Makefile index f15038e4935..aa1268a4056 100644 --- a/graphics/GraphicsMagick/Makefile +++ b/graphics/GraphicsMagick/Makefile @@ -2,6 +2,7 @@ PORTNAME= GraphicsMagick PORTVERSION= 1.3.37 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= graphics MASTER_SITES= SF diff --git 
a/graphics/ImageMagick6/Makefile b/graphics/ImageMagick6/Makefile index d7ad0cbe50a..63cd062cd45 100644 --- a/graphics/ImageMagick6/Makefile +++ b/graphics/ImageMagick6/Makefile @@ -1,6 +1,6 @@ PORTNAME= ImageMagick DISTVERSION= 6.9.12-34 -PORTREVISION= 1 +PORTREVISION= 2 PORTEPOCH= 1 CATEGORIES= graphics perl5 MASTER_SITES= https://www.imagemagick.org/download/ \ diff --git a/graphics/ImageMagick7/Makefile b/graphics/ImageMagick7/Makefile index 0f6ae878a5e..1c8d11a930b 100644 --- a/graphics/ImageMagick7/Makefile +++ b/graphics/ImageMagick7/Makefile @@ -1,6 +1,6 @@ PORTNAME= ImageMagick DISTVERSION= 7.1.0-19 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= graphics perl5 MASTER_SITES= https://www.imagemagick.org/download/ \ https://www.imagemagick.org/download/releases/ \ diff --git a/graphics/airsaned/Makefile b/graphics/airsaned/Makefile index 5dcd9fad4f2..53d95540c2d 100644 --- a/graphics/airsaned/Makefile +++ b/graphics/airsaned/Makefile @@ -1,8 +1,7 @@ PORTNAME= airsaned DISTVERSIONPREFIX= v -DISTVERSION= 0.3.2-54 -DISTVERSIONSUFFIX= -g433b762 -PORTREVISION= 1 +DISTVERSION= 0.3.3-0 +DISTVERSIONSUFFIX= -gg8e5dc45 CATEGORIES= graphics MAINTAINER= fbsd@opal.com diff --git a/graphics/airsaned/distinfo b/graphics/airsaned/distinfo index c90db0c52fb..2b96a7d1b22 100644 --- a/graphics/airsaned/distinfo +++ b/graphics/airsaned/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641840693 -SHA256 (SimulPiscator-AirSane-v0.3.2-54-g433b762_GH0.tar.gz) = 9b32b7a1e9023137901b999d5803b613e057a2ff700844d623f172b4354668a1 -SIZE (SimulPiscator-AirSane-v0.3.2-54-g433b762_GH0.tar.gz) = 116180 +TIMESTAMP = 1648405553 +SHA256 (SimulPiscator-AirSane-v0.3.3-0-g8e5dc45_GH0.tar.gz) = 2bc8e0a91bee55e142f176318711025fe8d2b7a926948d38f4f5552b387d2702 +SIZE (SimulPiscator-AirSane-v0.3.3-0-g8e5dc45_GH0.tar.gz) = 117059 diff --git a/graphics/art/Makefile b/graphics/art/Makefile index 61b6392b901..6cb1206a236 100644 --- a/graphics/art/Makefile +++ b/graphics/art/Makefile @@ -1,5 +1,6 @@ PORTNAME= art 
DISTVERSION= 1.12.1 +PORTREVISION= 1 CATEGORIES= graphics MASTER_SITES= https://bitbucket.org/agriggio/art/downloads/ PKGNAMESUFFIX= -raw-image-editor diff --git a/graphics/aseprite/Makefile b/graphics/aseprite/Makefile index fa8f5e83ec5..9f225ec44a5 100644 --- a/graphics/aseprite/Makefile +++ b/graphics/aseprite/Makefile @@ -3,7 +3,7 @@ PORTNAME= aseprite DISTVERSIONPREFIX= v DISTVERSION= 1.2.9 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= graphics MAINTAINER= yuri@FreeBSD.org diff --git a/graphics/atril-lite/Makefile b/graphics/atril-lite/Makefile index c6e9b1a9eb8..febbeb8cec7 100644 --- a/graphics/atril-lite/Makefile +++ b/graphics/atril-lite/Makefile @@ -1,5 +1,6 @@ # Created by: Adam Weinberger +PORTREVISION= 1 PKGNAMESUFFIX= -lite OPTIONS_EXCLUDE= CAJA DBUS diff --git a/graphics/atril/Makefile b/graphics/atril/Makefile index c99898a19e8..4addda4f166 100644 --- a/graphics/atril/Makefile +++ b/graphics/atril/Makefile @@ -4,7 +4,7 @@ PORTNAME= atril PORTVERSION= 1.26.0 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= graphics print mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/graphics/autopano-sift-c/Makefile b/graphics/autopano-sift-c/Makefile index fe9dbaa2807..15f997e5733 100644 --- a/graphics/autopano-sift-c/Makefile +++ b/graphics/autopano-sift-c/Makefile @@ -1,6 +1,6 @@ PORTNAME= autopano-sift-C PORTVERSION= 2.5.1 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= graphics MASTER_SITES= SF/hugin/${PORTNAME}/${PORTNAME}-${PORTVERSION} diff --git a/graphics/blender-lts28/Makefile b/graphics/blender-lts28/Makefile index 568c51265dd..8c385257eee 100644 --- a/graphics/blender-lts28/Makefile +++ b/graphics/blender-lts28/Makefile @@ -1,5 +1,6 @@ PORTNAME= blender DISTVERSION= 2.83.18 +PORTREVISION= 1 CATEGORIES= graphics multimedia MASTER_SITES= http://download.blender.org/source/ \ http://mirror.cs.umn.edu/blender.org/source/ \ diff --git a/graphics/blender-lts29/Makefile b/graphics/blender-lts29/Makefile index f1470927030..bfca4dca482 100644 --- 
a/graphics/blender-lts29/Makefile +++ b/graphics/blender-lts29/Makefile @@ -1,5 +1,6 @@ PORTNAME= blender DISTVERSION= 2.93.6 +PORTREVISION= 1 CATEGORIES= graphics multimedia MASTER_SITES= http://download.blender.org/source/ \ http://mirror.cs.umn.edu/blender.org/source/ \ diff --git a/graphics/blender/Makefile b/graphics/blender/Makefile index 3a24af5f817..90642b2f98d 100644 --- a/graphics/blender/Makefile +++ b/graphics/blender/Makefile @@ -1,5 +1,6 @@ PORTNAME= blender DISTVERSION= 3.0.0 +PORTREVISION= 1 CATEGORIES= graphics multimedia MASTER_SITES= http://download.blender.org/source/ \ http://mirror.cs.umn.edu/blender.org/source/ \ diff --git a/graphics/cairomm/Makefile b/graphics/cairomm/Makefile index 3a322de7b8f..2414e1ca017 100644 --- a/graphics/cairomm/Makefile +++ b/graphics/cairomm/Makefile @@ -2,7 +2,7 @@ PORTNAME= cairomm PORTVERSION= 1.12.2 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= graphics MASTER_SITES= http://cairographics.org/releases/ diff --git a/graphics/cegui/Makefile b/graphics/cegui/Makefile index 5d33077d0f2..4fa1b5a3f3b 100644 --- a/graphics/cegui/Makefile +++ b/graphics/cegui/Makefile @@ -2,7 +2,7 @@ PORTNAME= cegui PORTVERSION= 0.8.7 -PORTREVISION= 17 +PORTREVISION= 18 CATEGORIES= graphics devel MASTER_SITES= SF/crayzedsgui/CEGUI%20Mk-2/0.8 diff --git a/graphics/cluttermm/Makefile b/graphics/cluttermm/Makefile index 9d1b707eca5..81d96de880d 100644 --- a/graphics/cluttermm/Makefile +++ b/graphics/cluttermm/Makefile @@ -3,7 +3,7 @@ PORTNAME= cluttermm PORTVERSION= 1.17.3 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= graphics MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/graphics/cptutils/Makefile b/graphics/cptutils/Makefile index df2f276d9ae..72af5fd2c3a 100644 --- a/graphics/cptutils/Makefile +++ b/graphics/cptutils/Makefile @@ -1,5 +1,6 @@ PORTNAME= cptutils PORTVERSION= 1.74 +PORTREVISION= 1 CATEGORIES= graphics MASTER_SITES= http://soliton.vm.bytemark.co.uk/pub/jjg/src/ diff --git a/graphics/darktable/Makefile 
b/graphics/darktable/Makefile index 556c842b892..688ce82dad6 100644 --- a/graphics/darktable/Makefile +++ b/graphics/darktable/Makefile @@ -2,7 +2,7 @@ PORTNAME= darktable PORTVERSION= 3.8.1 -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics MASTER_SITES= https://github.com/darktable-org/${PORTNAME}/releases/download/release-${PORTVERSION:C/\.rc/rc/}/ diff --git a/graphics/dcmtk/Makefile b/graphics/dcmtk/Makefile index d58680f48a8..27e507a0a33 100644 --- a/graphics/dcmtk/Makefile +++ b/graphics/dcmtk/Makefile @@ -3,7 +3,7 @@ PORTNAME= dcmtk DISTVERSIONPREFIX= ${PORTNAME:tu}- DISTVERSION= 3.6.6 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= graphics devel MAINTAINER= yuri@FreeBSD.org diff --git a/graphics/delaboratory/Makefile b/graphics/delaboratory/Makefile index d2d6013ced3..703192af4f5 100644 --- a/graphics/delaboratory/Makefile +++ b/graphics/delaboratory/Makefile @@ -2,7 +2,7 @@ PORTNAME= delaboratory PORTVERSION= 0.8 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= graphics MASTER_SITES= https://BSDforge.com/projects/source/graphics/delaboratory/ diff --git a/graphics/dia/Makefile b/graphics/dia/Makefile index f3ded78b3ad..f561eab1754 100644 --- a/graphics/dia/Makefile +++ b/graphics/dia/Makefile @@ -3,7 +3,7 @@ PORTNAME= dia PORTVERSION= 0.97.3 -PORTREVISION= 1 +PORTREVISION= 2 PORTEPOCH= 1 CATEGORIES= graphics gnome MASTER_SITES= GNOME diff --git a/graphics/digikam/Makefile b/graphics/digikam/Makefile index adfdf07a8e8..6417c77389f 100644 --- a/graphics/digikam/Makefile +++ b/graphics/digikam/Makefile @@ -1,5 +1,6 @@ PORTNAME= digikam DISTVERSION= 7.6.0 +PORTREVISION= 1 CATEGORIES= graphics kde MASTER_SITES= KDE/stable/${PORTNAME}/${PORTVERSION} DIST_SUBDIR= KDE/digikam/${PORTVERSION} diff --git a/graphics/elementary-photos/Makefile b/graphics/elementary-photos/Makefile index 417c79159c7..88564e4f526 100644 --- a/graphics/elementary-photos/Makefile +++ b/graphics/elementary-photos/Makefile @@ -1,6 +1,6 @@ PORTNAME= elementary-photos DISTVERSION= 2.7.1 
-PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= graphics MAINTAINER= miguel@gocobachi.dev diff --git a/graphics/eog-plugins/Makefile b/graphics/eog-plugins/Makefile index 9c83af93028..1e8266db5e3 100644 --- a/graphics/eog-plugins/Makefile +++ b/graphics/eog-plugins/Makefile @@ -2,6 +2,7 @@ PORTNAME= eog-plugins DISTVERSION= 3.26.8 +PORTREVISION= 1 CATEGORIES= graphics gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/graphics/eom/Makefile b/graphics/eom/Makefile index 30ef17db627..b544b9327b5 100644 --- a/graphics/eom/Makefile +++ b/graphics/eom/Makefile @@ -2,6 +2,7 @@ PORTNAME= eom PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= graphics mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/graphics/evince/Makefile b/graphics/evince/Makefile index 2c90ce6e550..716e04e1099 100644 --- a/graphics/evince/Makefile +++ b/graphics/evince/Makefile @@ -2,7 +2,7 @@ PORTNAME= evince DISTVERSION= 41.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= graphics print gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/graphics/flam3/Makefile b/graphics/flam3/Makefile index 19c1baf994d..9ac03da5e3a 100644 --- a/graphics/flam3/Makefile +++ b/graphics/flam3/Makefile @@ -3,6 +3,7 @@ PORTNAME= flam3 DISTVERSIONPREFIX= v DISTVERSION= 3.1.1-5 +PORTREVISION= 1 DISTVERSIONSUFFIX= -g7fb50c8 CATEGORIES= graphics diff --git a/graphics/frogr/Makefile b/graphics/frogr/Makefile index 3257bd622d9..bd339e4bdd0 100644 --- a/graphics/frogr/Makefile +++ b/graphics/frogr/Makefile @@ -2,6 +2,7 @@ PORTNAME= frogr PORTVERSION= 1.6 +PORTREVISION= 1 CATEGORIES= graphics MASTER_SITES= GNOME diff --git a/graphics/fyre/Makefile b/graphics/fyre/Makefile index e952f34e2c9..5022be9db94 100644 --- a/graphics/fyre/Makefile +++ b/graphics/fyre/Makefile @@ -2,7 +2,7 @@ PORTNAME= fyre PORTVERSION= 1.0.1 -PORTREVISION= 23 +PORTREVISION= 24 CATEGORIES= graphics MASTER_SITES= http://releases.navi.cx/fyre/ diff --git a/graphics/gdal/Makefile 
b/graphics/gdal/Makefile index 0cfc7cac5cc..5cd9500a265 100644 --- a/graphics/gdal/Makefile +++ b/graphics/gdal/Makefile @@ -2,7 +2,7 @@ PORTNAME= gdal PORTVERSION= 3.4.1 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= graphics geography MASTER_SITES= https://download.osgeo.org/gdal/${PORTVERSION}/ \ LOCAL/sunpoet diff --git a/graphics/gdk-pixbuf2/Makefile b/graphics/gdk-pixbuf2/Makefile index 00840aabaa7..cda5d0dc675 100644 --- a/graphics/gdk-pixbuf2/Makefile +++ b/graphics/gdk-pixbuf2/Makefile @@ -2,6 +2,7 @@ PORTNAME= gdk-pixbuf PORTVERSION= 2.40.0 +PORTREVISION= 2 CATEGORIES= graphics MASTER_SITES= GNOME PKGNAMESUFFIX= 2 @@ -16,11 +17,13 @@ LICENSE_FILE= ${WRKSRC}/COPYING BUILD_DEPENDS= docbook-xsl>0:textproc/docbook-xsl USES= cpe gettext gnome localbase:ldflags meson pkgconfig \ - python:3.4+,build shared-mime-info tar:xz + python:3.4+,build shared-mime-info tar:xz trigger CPE_VENDOR= gnome USE_GNOME= glib20 introspection:build libxslt:build USE_LDCONFIG= yes MESON_ARGS= -Dman=true +TRIGGERS= gdk-pixbuf-query-loaders +SUB_LIST= GTK2_VERSION=${GTK2_VERSION} BINARY_ALIAS= python3=${PYTHON_VERSION} diff --git a/graphics/gdk-pixbuf2/files/gdk-pixbuf-query-loaders.ucl.in b/graphics/gdk-pixbuf2/files/gdk-pixbuf-query-loaders.ucl.in new file mode 100644 index 00000000000..eae70e206be --- /dev/null +++ b/graphics/gdk-pixbuf2/files/gdk-pixbuf-query-loaders.ucl.in @@ -0,0 +1,17 @@ +path: "%%PREFIX%%/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders" +cleanup: { + type: lua + sandbox: false + script: < /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true -@postunexec rm %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>&1 >/dev/null || true diff --git a/graphics/gimmage/Makefile b/graphics/gimmage/Makefile index 7039bcc1462..9a5d89184e2 100644 --- a/graphics/gimmage/Makefile +++ b/graphics/gimmage/Makefile @@ -2,7 +2,7 @@ PORTNAME= gimmage PORTVERSION= 0.2.3 -PORTREVISION= 16 +PORTREVISION= 17 
CATEGORIES= graphics MASTER_SITES= BERLIOS diff --git a/graphics/gnash/Makefile b/graphics/gnash/Makefile index e0ceab4461e..7f3be1e4060 100644 --- a/graphics/gnash/Makefile +++ b/graphics/gnash/Makefile @@ -1,6 +1,6 @@ PORTNAME= gnash PORTVERSION= 0.8.10 -PORTREVISION= 47 +PORTREVISION= 48 CATEGORIES= graphics MASTER_SITES= http://git.savannah.gnu.org/cgit/gnash.git/snapshot/ DISTNAME= gnash-2b3bdede0305c4fc3ad21a0a4197330606c9b880 diff --git a/graphics/goocanvasmm2/Makefile b/graphics/goocanvasmm2/Makefile index 22beedba4a2..384ef0a89a3 100644 --- a/graphics/goocanvasmm2/Makefile +++ b/graphics/goocanvasmm2/Makefile @@ -3,7 +3,7 @@ PORTNAME= goocanvasmm PORTVERSION= 1.90.9 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= graphics MASTER_SITES= GNOME PKGNAMESUFFIX= 2 diff --git a/graphics/gpaint/Makefile b/graphics/gpaint/Makefile index 20923268f9f..3ea2aac3ea1 100644 --- a/graphics/gpaint/Makefile +++ b/graphics/gpaint/Makefile @@ -3,7 +3,7 @@ PORTNAME= gpaint PORTVERSION= 0.3.3 DISTVERSIONPREFIX= 2- -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= graphics gnome MASTER_SITES= GNU_ALPHA diff --git a/graphics/gstreamer1-plugins-aalib/Makefile b/graphics/gstreamer1-plugins-aalib/Makefile index dbac7a2bab9..310e73421fb 100644 --- a/graphics/gstreamer1-plugins-aalib/Makefile +++ b/graphics/gstreamer1-plugins-aalib/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer ASCII art videosink plugin diff --git a/graphics/gstreamer1-plugins-cairo/Makefile b/graphics/gstreamer1-plugins-cairo/Makefile index 534815daae0..d21122be054 100644 --- a/graphics/gstreamer1-plugins-cairo/Makefile +++ b/graphics/gstreamer1-plugins-cairo/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer overlay renderer plugin based on cairo diff --git a/graphics/gstreamer1-plugins-gdkpixbuf/Makefile b/graphics/gstreamer1-plugins-gdkpixbuf/Makefile index f9f7e0003e2..e4945224274 100644 --- 
a/graphics/gstreamer1-plugins-gdkpixbuf/Makefile +++ b/graphics/gstreamer1-plugins-gdkpixbuf/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics PKGNAMESUFFIX= 1-plugins-gdkpixbuf diff --git a/graphics/gstreamer1-plugins-gl/Makefile b/graphics/gstreamer1-plugins-gl/Makefile index 165c1ab3102..ec7ce74a744 100644 --- a/graphics/gstreamer1-plugins-gl/Makefile +++ b/graphics/gstreamer1-plugins-gl/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= graphics COMMENT= GStreamer GL graphics plugin diff --git a/graphics/gstreamer1-plugins-jpeg/Makefile b/graphics/gstreamer1-plugins-jpeg/Makefile index ab3ccc9723d..3140d395187 100644 --- a/graphics/gstreamer1-plugins-jpeg/Makefile +++ b/graphics/gstreamer1-plugins-jpeg/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer jpeg encoder/decoder plugin diff --git a/graphics/gstreamer1-plugins-kms/Makefile b/graphics/gstreamer1-plugins-kms/Makefile index 4cad2a24a18..41b182dabbb 100644 --- a/graphics/gstreamer1-plugins-kms/Makefile +++ b/graphics/gstreamer1-plugins-kms/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer video sink via KMS plugin diff --git a/graphics/gstreamer1-plugins-libcaca/Makefile b/graphics/gstreamer1-plugins-libcaca/Makefile index 0ec8dc93c4e..5e57adfdbb4 100644 --- a/graphics/gstreamer1-plugins-libcaca/Makefile +++ b/graphics/gstreamer1-plugins-libcaca/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer color ASCII art plugin diff --git a/graphics/gstreamer1-plugins-libvisual/Makefile b/graphics/gstreamer1-plugins-libvisual/Makefile index 13e56a29a5b..ff384e00300 100644 --- a/graphics/gstreamer1-plugins-libvisual/Makefile +++ b/graphics/gstreamer1-plugins-libvisual/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer libvisual plugin diff --git 
a/graphics/gstreamer1-plugins-opencv/Makefile b/graphics/gstreamer1-plugins-opencv/Makefile index d6b54653869..a4128277537 100644 --- a/graphics/gstreamer1-plugins-opencv/Makefile +++ b/graphics/gstreamer1-plugins-opencv/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= graphics COMMENT= GStreamer opencv real time computer vision plugin diff --git a/graphics/gstreamer1-plugins-openexr/Makefile b/graphics/gstreamer1-plugins-openexr/Makefile index 3841c97a425..5193be98f07 100644 --- a/graphics/gstreamer1-plugins-openexr/Makefile +++ b/graphics/gstreamer1-plugins-openexr/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= graphics COMMENT= GStreamer OpenExr codec plugin diff --git a/graphics/gstreamer1-plugins-openjpeg/Makefile b/graphics/gstreamer1-plugins-openjpeg/Makefile index dc7400b6e29..d074e68b31e 100644 --- a/graphics/gstreamer1-plugins-openjpeg/Makefile +++ b/graphics/gstreamer1-plugins-openjpeg/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer jpeg2000 decoder plugin diff --git a/graphics/gstreamer1-plugins-png/Makefile b/graphics/gstreamer1-plugins-png/Makefile index 35982e8dec1..4b9f9a32bab 100644 --- a/graphics/gstreamer1-plugins-png/Makefile +++ b/graphics/gstreamer1-plugins-png/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics PKGNAMESUFFIX= 1-plugins-png diff --git a/graphics/gstreamer1-plugins-qt/Makefile b/graphics/gstreamer1-plugins-qt/Makefile index 9d6679b9c07..b185d6f523c 100644 --- a/graphics/gstreamer1-plugins-qt/Makefile +++ b/graphics/gstreamer1-plugins-qt/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer Qt videosink plugin diff --git a/graphics/gstreamer1-plugins-rsvg/Makefile b/graphics/gstreamer1-plugins-rsvg/Makefile index 6ecf6ea73c3..9a2ae381b48 100644 --- a/graphics/gstreamer1-plugins-rsvg/Makefile +++ b/graphics/gstreamer1-plugins-rsvg/Makefile @@ -1,4 +1,4 @@ 
-PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer SVG plugin diff --git a/graphics/gstreamer1-plugins-vulkan/Makefile b/graphics/gstreamer1-plugins-vulkan/Makefile index 3d414ffaef4..33557e0ef9b 100644 --- a/graphics/gstreamer1-plugins-vulkan/Makefile +++ b/graphics/gstreamer1-plugins-vulkan/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= graphics COMMENT= GStreamer Vulkan graphics plugin diff --git a/graphics/gstreamer1-plugins-webp/Makefile b/graphics/gstreamer1-plugins-webp/Makefile index 73b0ea1bb1e..ec190d2d954 100644 --- a/graphics/gstreamer1-plugins-webp/Makefile +++ b/graphics/gstreamer1-plugins-webp/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer webp image decoder plugin diff --git a/graphics/gstreamer1-plugins-zbar/Makefile b/graphics/gstreamer1-plugins-zbar/Makefile index 2f4739f496c..36f98d8be6f 100644 --- a/graphics/gstreamer1-plugins-zbar/Makefile +++ b/graphics/gstreamer1-plugins-zbar/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= graphics COMMENT= GStreamer ZBar barcode detector plugin diff --git a/graphics/iccxml/Makefile b/graphics/iccxml/Makefile index e23aeb6b458..88876160e12 100644 --- a/graphics/iccxml/Makefile +++ b/graphics/iccxml/Makefile @@ -1,5 +1,6 @@ PORTNAME= iccxml PORTVERSION= 0.9.8 +PORTREVISION= 1 CATEGORIES= graphics MASTER_SITES= SF/${PORTNAME}/IccXML-Src/${DISTNAME} DISTNAME= IccXML-${PORTVERSION} diff --git a/graphics/ikona/Makefile b/graphics/ikona/Makefile index 1f2eabe6d7a..feb9ca49c0f 100644 --- a/graphics/ikona/Makefile +++ b/graphics/ikona/Makefile @@ -1,6 +1,6 @@ PORTNAME= ikona DISTVERSION= 1.0 -PORTREVISION= 20 +PORTREVISION= 21 CATEGORIES= graphics kde MASTER_SITES= KDE/stable/${PORTNAME}/${DISTVERSION}/ DISTFILES= ${DISTNAME}${EXTRACT_SUFX} \ diff --git a/graphics/impressive/Makefile b/graphics/impressive/Makefile index 71d39340887..243400ef841 100644 --- a/graphics/impressive/Makefile +++ 
b/graphics/impressive/Makefile @@ -1,8 +1,7 @@ # Created by: Veniamin Gvozdikov PORTNAME= impressive -DISTVERSION= 0.13.0-beta2 -PORTREVISION= 12 +DISTVERSION= 0.13.1 CATEGORIES= graphics python MASTER_SITES= SF/${PORTNAME}/Impressive/${DISTVERSION}/ DISTNAME= Impressive-${DISTVERSION} @@ -15,7 +14,7 @@ LICENSE_FILE= ${WRKSRC}/license.txt RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}PyOpenGL>=0:graphics/py-PyOpenGL@${PY_FLAVOR} \ ${PYGAME} \ - ${PY_PILLOW} + ${PYTHON_PKGNAMEPREFIX}pillow>=0:graphics/py-pillow@${PY_FLAVOR} USES= python:3.5+ sdl shebangfix USE_SDL= sdl @@ -24,7 +23,7 @@ SHEBANG_FILES= impressive.py NO_BUILD= yes NO_ARCH= yes PLIST_FILES= bin/${PORTNAME} \ - man/man1/${PORTNAME}.1.gz + share/man/man1/${PORTNAME}.1.gz PORTDOCS= ${PORTNAME}.html demo.pdf @@ -62,7 +61,7 @@ XPDF_RUN_DEPENDS= ${LOCALBASE}/libexec/xpdf/pdftoppm:graphics/xpdf do-install: ${INSTALL_SCRIPT} ${WRKSRC}/${PORTNAME}.py ${STAGEDIR}${PREFIX}/bin/${PORTNAME} - ${INSTALL_MAN} ${WRKSRC}/${PORTNAME}.1 ${STAGEDIR}${MAN1PREFIX}/man/man1 + ${INSTALL_MAN} ${WRKSRC}/${PORTNAME}.1 ${STAGEDIR}${PREFIX}/share/man/man1 do-install-DOCS-on: @${MKDIR} ${STAGEDIR}${DOCSDIR} diff --git a/graphics/impressive/distinfo b/graphics/impressive/distinfo index 7e205f3e119..1b3b69f9745 100644 --- a/graphics/impressive/distinfo +++ b/graphics/impressive/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1600015711 -SHA256 (Impressive-0.13.0-beta2.tar.gz) = 38a445d111cb8be7157605ff12c786bbad475b712f13a66a3bd3dd403d414a10 -SIZE (Impressive-0.13.0-beta2.tar.gz) = 311905 +TIMESTAMP = 1647898237 +SHA256 (Impressive-0.13.1.tar.gz) = 2489173bbf8ab12b449907d811030fa973d23a30e637c635f17527eaba142d34 +SIZE (Impressive-0.13.1.tar.gz) = 215989 diff --git a/graphics/inkscape/Makefile b/graphics/inkscape/Makefile index 68a126812f9..759794984bb 100644 --- a/graphics/inkscape/Makefile +++ b/graphics/inkscape/Makefile @@ -2,6 +2,7 @@ PORTNAME= inkscape DISTVERSION= 1.1.2 +PORTREVISION= 1 CATEGORIES= graphics gnome MASTER_SITES= 
https://inkscape.org/gallery/item/31668/ diff --git a/graphics/kf5-kimageformats/Makefile b/graphics/kf5-kimageformats/Makefile index 068c8728ea7..01d15301af2 100644 --- a/graphics/kf5-kimageformats/Makefile +++ b/graphics/kf5-kimageformats/Makefile @@ -15,7 +15,7 @@ USE_QT= core gui printsupport widgets \ CFLAGS+= -I${LOCALBASE}/include/Imath -OPTIONS_DEFINE= AVIF JXL OPENEXR +OPTIONS_DEFINE= AVIF JXL LIBHEIF OPENEXR OPTIONS_DEFAULT= AVIF JXL OPENEXR OPTIONS_SUB= yes @@ -25,6 +25,10 @@ AVIF_CMAKE_BOOL_OFF= CMAKE_DISABLE_FIND_PACKAGE_libavif JXL_LIB_DEPENDS= libjxl.so:graphics/libjxl JXL_CMAKE_BOOL= KIMAGEFORMATS_JXL +LIBHEIF_DESC= ISO/IEC 23008-12:2017 HEIF file format support +LIBHEIF_LIB_DEPENDS= libheif.so:graphics/libheif +LIBHEIF_CMAKE_BOOL= KIMAGEFORMATS_HEIF + OPENEXR_LIB_DEPENDS= libOpenEXR.so:graphics/openexr OPENEXR_CMAKE_BOOL_OFF= CMAKE_DISABLE_FIND_PACKAGE_OpenEXR diff --git a/graphics/kf5-kimageformats/pkg-plist b/graphics/kf5-kimageformats/pkg-plist index 7157ef47f2e..f0134abb4bd 100644 --- a/graphics/kf5-kimageformats/pkg-plist +++ b/graphics/kf5-kimageformats/pkg-plist @@ -1,7 +1,9 @@ +%%QT_PLUGINDIR%%/imageformats/kimg_ani.so %%AVIF%%%%QT_PLUGINDIR%%/imageformats/kimg_avif.so %%QT_PLUGINDIR%%/imageformats/kimg_eps.so %%OPENEXR%%%%QT_PLUGINDIR%%/imageformats/kimg_exr.so %%QT_PLUGINDIR%%/imageformats/kimg_hdr.so +%%LIBHEIF%%%%QT_PLUGINDIR%%/imageformats/kimg_heif.so %%JXL%%%%QT_PLUGINDIR%%/imageformats/kimg_jxl.so %%QT_PLUGINDIR%%/imageformats/kimg_kra.so %%QT_PLUGINDIR%%/imageformats/kimg_ora.so @@ -12,13 +14,13 @@ %%QT_PLUGINDIR%%/imageformats/kimg_rgb.so %%QT_PLUGINDIR%%/imageformats/kimg_tga.so %%QT_PLUGINDIR%%/imageformats/kimg_xcf.so -%%QT_PLUGINDIR%%/imageformats/kimg_ani.so share/kservices5/qimageioplugins/ani.desktop %%AVIF%%share/kservices5/qimageioplugins/avif.desktop share/kservices5/qimageioplugins/dds.desktop share/kservices5/qimageioplugins/eps.desktop %%OPENEXR%%share/kservices5/qimageioplugins/exr.desktop 
share/kservices5/qimageioplugins/hdr.desktop +%%LIBHEIF%%share/kservices5/qimageioplugins/heif.desktop share/kservices5/qimageioplugins/jp2.desktop %%JXL%%share/kservices5/qimageioplugins/jxl.desktop share/kservices5/qimageioplugins/kra.desktop diff --git a/graphics/libavif/pkg-plist b/graphics/libavif/pkg-plist index 23c018ec113..08f46c72a99 100644 --- a/graphics/libavif/pkg-plist +++ b/graphics/libavif/pkg-plist @@ -9,5 +9,3 @@ lib/libavif.so lib/libavif.so.13 lib/libavif.so.13.0.0 libdata/pkgconfig/libavif.pc -%%PIXBUF%%@postexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true -%%PIXBUF%%@postunexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true diff --git a/graphics/libetonyek01/Makefile b/graphics/libetonyek01/Makefile index 3eea39fbba4..28713c29063 100644 --- a/graphics/libetonyek01/Makefile +++ b/graphics/libetonyek01/Makefile @@ -1,6 +1,6 @@ PORTNAME= libetonyek PORTVERSION= 0.1.10 -PORTREVISION= 1 +PORTREVISION= 2 PORTEPOCH= 1 CATEGORIES= graphics MASTER_SITES= LODEV/${PORTNAME} diff --git a/graphics/libgeotiff/Makefile b/graphics/libgeotiff/Makefile index 428ddefdfdf..e44adaeaf8b 100644 --- a/graphics/libgeotiff/Makefile +++ b/graphics/libgeotiff/Makefile @@ -1,7 +1,7 @@ # Created by: Sam Lawrance PORTNAME= libgeotiff -PORTVERSION= 1.7.0 +PORTVERSION= 1.7.1 CATEGORIES= graphics MASTER_SITES= https://download.osgeo.org/geotiff/libgeotiff/ \ LOCAL/sunpoet diff --git a/graphics/libgeotiff/distinfo b/graphics/libgeotiff/distinfo index 6305fb46268..58a9d1cc878 100644 --- a/graphics/libgeotiff/distinfo +++ b/graphics/libgeotiff/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1625585430 -SHA256 (libgeotiff-1.7.0.tar.gz) = fc304d8839ca5947cfbeb63adb9d1aa47acef38fc6d6689e622926e672a99a7e -SIZE (libgeotiff-1.7.0.tar.gz) = 541931 
+TIMESTAMP = 1647264432 +SHA256 (libgeotiff-1.7.1.tar.gz) = 05ab1347aaa471fc97347d8d4269ff0c00f30fa666d956baba37948ec87e55d6 +SIZE (libgeotiff-1.7.1.tar.gz) = 542779 diff --git a/graphics/libgnomecanvasmm26/Makefile b/graphics/libgnomecanvasmm26/Makefile index e0409d5e2bb..7d80fc07fcb 100644 --- a/graphics/libgnomecanvasmm26/Makefile +++ b/graphics/libgnomecanvasmm26/Makefile @@ -3,7 +3,7 @@ PORTNAME= libgnomecanvasmm PORTVERSION= 2.26.0 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= graphics gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/graphics/libgphoto2/Makefile b/graphics/libgphoto2/Makefile index 4be8acb20c4..89f98a487ea 100644 --- a/graphics/libgphoto2/Makefile +++ b/graphics/libgphoto2/Makefile @@ -2,6 +2,7 @@ PORTNAME= libgphoto2 PORTVERSION= 2.5.26 +PORTREVISION= 1 CATEGORIES= graphics MASTER_SITES= SF/gphoto/libgphoto/${PORTVERSION} diff --git a/graphics/libjxl/Makefile b/graphics/libjxl/Makefile index ef51f97e982..edb035777cd 100644 --- a/graphics/libjxl/Makefile +++ b/graphics/libjxl/Makefile @@ -42,6 +42,7 @@ OPTIONS_DEFINE= GIF JPEG LTO MANPAGES OPENEXR PIXBUF PNG OPTIONS_DEFAULT=GIF JPEG LTO MANPAGES OPENEXR PIXBUF PNG OPTIONS_EXCLUDE_i386= LTO # ConvolutionWithTranspose(): JXL_CHECK: out->xsize() == in.ysize() OPTIONS_EXCLUDE_powerpc64= ${"${/usr/bin/ld:L:tA}"==/usr/bin/ld.lld:?LTO:} # https://github.com/llvm/llvm-project/issues/46697 +OPTIONS_EXCLUDE_riscv64= LTO # bug 262871 OPTIONS_SUB= yes GIF_LIB_DEPENDS= libgif.so:graphics/giflib diff --git a/graphics/libjxl/pkg-plist b/graphics/libjxl/pkg-plist index 77bfe896e4c..91051800f60 100644 --- a/graphics/libjxl/pkg-plist +++ b/graphics/libjxl/pkg-plist @@ -33,5 +33,3 @@ libdata/pkgconfig/libjxl_threads.pc %%MANPAGES%%share/man/man1/djxl.1.gz share/mime/packages/image-jxl.xml %%PIXBUF%%share/thumbnailers/jxl.thumbnailer -%%PIXBUF%%@postexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 
2>/dev/null || /usr/bin/true -%%PIXBUF%%@postunexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true diff --git a/graphics/libkdcraw/Makefile b/graphics/libkdcraw/Makefile index 84fd3f7eb55..2c1e15b9b4e 100644 --- a/graphics/libkdcraw/Makefile +++ b/graphics/libkdcraw/Makefile @@ -1,5 +1,6 @@ PORTNAME= libkdcraw DISTVERSION= ${KDE_APPLICATIONS_VERSION} +PORTREVISION= 1 CATEGORIES= graphics kde kde-applications MAINTAINER= kde@FreeBSD.org diff --git a/graphics/libopenraw/Makefile b/graphics/libopenraw/Makefile index 40daed57f92..230818e0fad 100644 --- a/graphics/libopenraw/Makefile +++ b/graphics/libopenraw/Makefile @@ -2,7 +2,7 @@ PORTNAME= libopenraw PORTVERSION= 0.3.0 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= graphics MASTER_SITES= http://libopenraw.freedesktop.org/download/ DISTFILES= ${DISTNAME}${EXTRACT_SUFX} diff --git a/graphics/librsvg2-rust/Makefile b/graphics/librsvg2-rust/Makefile index e26c1e61cf8..bee8df58edf 100644 --- a/graphics/librsvg2-rust/Makefile +++ b/graphics/librsvg2-rust/Makefile @@ -2,6 +2,7 @@ PORTNAME= librsvg PORTVERSION= 2.54.0 +PORTREVISION= 1 CATEGORIES= graphics gnome MASTER_SITES= GNOME PKGNAMESUFFIX= 2-rust diff --git a/graphics/librsvg2-rust/pkg-plist b/graphics/librsvg2-rust/pkg-plist index eac00705852..e44127c344e 100644 --- a/graphics/librsvg2-rust/pkg-plist +++ b/graphics/librsvg2-rust/pkg-plist @@ -131,5 +131,3 @@ man/man1/rsvg-convert.1.gz share/gir-1.0/Rsvg-2.0.gir share/thumbnailers/librsvg.thumbnailer share/vala/vapi/librsvg-2.0.vapi -@postexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true -@postunexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true diff --git 
a/graphics/librsvg2/Makefile b/graphics/librsvg2/Makefile index dc692002765..4a6012ff7fb 100644 --- a/graphics/librsvg2/Makefile +++ b/graphics/librsvg2/Makefile @@ -2,7 +2,7 @@ PORTNAME= librsvg PORTVERSION= 2.40.21 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= graphics gnome MASTER_SITES= GNOME PKGNAMESUFFIX= 2 diff --git a/graphics/librsvg2/pkg-plist b/graphics/librsvg2/pkg-plist index 07d52269c0e..d5d4006c0c1 100644 --- a/graphics/librsvg2/pkg-plist +++ b/graphics/librsvg2/pkg-plist @@ -35,5 +35,3 @@ man/man1/rsvg-convert.1.gz share/gir-1.0/Rsvg-2.0.gir share/thumbnailers/librsvg.thumbnailer %%VAPI%%share/vala/vapi/librsvg-2.0.vapi -@postexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true -@postunexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true diff --git a/graphics/libsvg/Makefile b/graphics/libsvg/Makefile index cccf62df900..361b06a0cfe 100644 --- a/graphics/libsvg/Makefile +++ b/graphics/libsvg/Makefile @@ -3,7 +3,7 @@ PORTNAME= libsvg PORTVERSION= 0.1.4 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= graphics MASTER_SITES= http://cairographics.org/snapshots/ diff --git a/graphics/mesa-devel/Makefile b/graphics/mesa-devel/Makefile index 579c77e6ed1..7b58cc39b67 100644 --- a/graphics/mesa-devel/Makefile +++ b/graphics/mesa-devel/Makefile @@ -1,6 +1,6 @@ PORTNAME= mesa -DISTVERSION= 22.0-branchpoint-1798 -DISTVERSIONSUFFIX= -gd0e99e566f6 +DISTVERSION= 22.0-branchpoint-1911 +DISTVERSIONSUFFIX= -g3e9bd67f234 CATEGORIES= graphics PKGNAMESUFFIX= -devel diff --git a/graphics/mesa-devel/distinfo b/graphics/mesa-devel/distinfo index 8b3dae8ffcf..dca521b258e 100644 --- a/graphics/mesa-devel/distinfo +++ b/graphics/mesa-devel/distinfo @@ -1,6 +1,6 @@ -TIMESTAMP = 1648080260 -SHA256 
(mesa3d-mesa-22.0-branchpoint-1798-gd0e99e566f6_GH0.tar.gz) = fe295a2a4225e6aa42b7c9c55c397ef462a58bb8dda9d56fb75a0bf5de9076ee -SIZE (mesa3d-mesa-22.0-branchpoint-1798-gd0e99e566f6_GH0.tar.gz) = 24420425 +TIMESTAMP = 1648253207 +SHA256 (mesa3d-mesa-22.0-branchpoint-1911-g3e9bd67f234_GH0.tar.gz) = 9659770c0e8681a811730d1dc1ab255cd02fe04bdfa337009fae51ea54e0fade +SIZE (mesa3d-mesa-22.0-branchpoint-1911-g3e9bd67f234_GH0.tar.gz) = 24550091 SHA256 (700efacda59c.patch) = f034cfbe09edff0baba67e46e7e3812fdef73ff3cf3e579050c024c95234c8d5 SIZE (700efacda59c.patch) = 981 SHA256 (50433886a3e3.patch) = 15af265e9dbb5dec7514062cfa549d1c1053f567395d9d133611c2a5138da470 diff --git a/graphics/mesa-dri/pkg-plist b/graphics/mesa-dri/pkg-plist index e52040564e8..a08ffff90ed 100644 --- a/graphics/mesa-dri/pkg-plist +++ b/graphics/mesa-dri/pkg-plist @@ -35,6 +35,7 @@ include/GL/internal/dri_interface.h libdata/pkgconfig/dri.pc @comment libdata/pkgconfig/gbm.pc share/drirc.d/00-mesa-defaults.conf +share/drirc.d/00-radv-defaults.conf share/drirc.d/01-freebsd.conf %%INTEL_VDRIVER%%share/vulkan/icd.d/intel_icd.%%ARCH%%.json %%AMD_VDRIVER%%share/vulkan/icd.d/radeon_icd.%%ARCH%%.json diff --git a/graphics/minder/Makefile b/graphics/minder/Makefile index e1963bffb1d..0ea4ca12720 100644 --- a/graphics/minder/Makefile +++ b/graphics/minder/Makefile @@ -1,5 +1,6 @@ PORTNAME= minder PORTVERSION= 1.14.0 +PORTREVISION= 1 CATEGORIES= graphics # avoid PKGBASE collision with net-p2p/minder PKGNAMESUFFIX= -app diff --git a/graphics/mypaint-brushes/Makefile b/graphics/mypaint-brushes/Makefile index fc518ef89d4..37cac995720 100644 --- a/graphics/mypaint-brushes/Makefile +++ b/graphics/mypaint-brushes/Makefile @@ -16,4 +16,6 @@ USE_GITHUB=yes GH_ACCOUNT= mypaint +NO_ARCH= yes + .include diff --git a/graphics/nip2/Makefile b/graphics/nip2/Makefile index e72fcd0afd0..04c46031e86 100644 --- a/graphics/nip2/Makefile +++ b/graphics/nip2/Makefile @@ -2,7 +2,7 @@ PORTNAME= nip2 PORTVERSION= 8.7.1 -PORTREVISION= 9 
+PORTREVISION= 10 CATEGORIES= graphics MASTER_SITES= https://github.com/libvips/${PORTNAME}/releases/download/v${PORTVERSION}/ diff --git a/graphics/opencollada/Makefile b/graphics/opencollada/Makefile index 303f6db0059..30b6cc7362d 100644 --- a/graphics/opencollada/Makefile +++ b/graphics/opencollada/Makefile @@ -1,7 +1,7 @@ PORTNAME= opencollada DISTVERSIONPREFIX= v DISTVERSION= 1.6.68 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= graphics devel MAINTAINER= sunpoet@FreeBSD.org diff --git a/graphics/opencollada/files/patch-clang13 b/graphics/opencollada/files/patch-clang13 new file mode 100644 index 00000000000..80ec2e2da92 --- /dev/null +++ b/graphics/opencollada/files/patch-clang13 @@ -0,0 +1,11 @@ +--- common/libBuffer/include/CommonFWriteBufferFlusher.h.orig 2018-11-26 22:43:10 UTC ++++ common/libBuffer/include/CommonFWriteBufferFlusher.h +@@ -19,6 +19,8 @@ + #else + # include + #endif ++#elif defined(__clang_major__) && __clang_major__ >= 13 ++# include + #else + # include + #endif diff --git a/graphics/opendx/Makefile b/graphics/opendx/Makefile index e2d31a69bec..bbae21841a9 100644 --- a/graphics/opendx/Makefile +++ b/graphics/opendx/Makefile @@ -15,8 +15,7 @@ LICENSE_NAME= IBM PUBLIC LICENSE - Open Visualization Data Explorer VERSION 1.0 LICENSE_FILE= ${WRKSRC}/LICENSE LICENSE_PERMS= dist-mirror dist-sell pkg-mirror pkg-sell auto-accept -NOT_FOR_ARCHS= arm64 -NOT_FOR_ARCHS_REASON= does not build +BROKEN_aarch64= fails to link: undefined symbol: sbrk LIB_DEPENDS= libtiff.so:graphics/tiff \ libcdf.so:science/cdf diff --git a/graphics/openfx-arena/Makefile b/graphics/openfx-arena/Makefile index 550229f7923..428215ec86d 100644 --- a/graphics/openfx-arena/Makefile +++ b/graphics/openfx-arena/Makefile @@ -1,7 +1,7 @@ PORTNAME= openfx-arena PORTVERSION= 2.3.14 DISTVERSIONPREFIX= Natron- -PORTREVISION= 47 +PORTREVISION= 48 CATEGORIES= graphics PATCH_SITES= https://github.com/${GH_ACCOUNT}/${GH_PROJECT}/commit/ diff --git a/graphics/oyranos/Makefile 
b/graphics/oyranos/Makefile index 7d9f9616b77..4dae5a38cae 100644 --- a/graphics/oyranos/Makefile +++ b/graphics/oyranos/Makefile @@ -1,6 +1,6 @@ PORTNAME= oyranos PORTVERSION= 0.9.6 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= graphics MAINTAINER= bsam@FreeBSD.org diff --git a/graphics/p5-Alien-Gimp/Makefile b/graphics/p5-Alien-Gimp/Makefile index f01669513db..44aa1289aa4 100644 --- a/graphics/p5-Alien-Gimp/Makefile +++ b/graphics/p5-Alien-Gimp/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= Alien-Gimp -PORTVERSION= 0.09 +PORTVERSION= 0.10 CATEGORIES= graphics perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/graphics/p5-Alien-Gimp/distinfo b/graphics/p5-Alien-Gimp/distinfo index 6f49599ebbc..96544d677fe 100644 --- a/graphics/p5-Alien-Gimp/distinfo +++ b/graphics/p5-Alien-Gimp/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1619198441 -SHA256 (Alien-Gimp-0.09.tar.gz) = 0915259b642f2724a6009e3034d41465e6ac0ed49c787d49f1f4967930d93c3a -SIZE (Alien-Gimp-0.09.tar.gz) = 3025 +TIMESTAMP = 1647264478 +SHA256 (Alien-Gimp-0.10.tar.gz) = 715fc20fd4cc26e3f283edade26e15a5eccb598c346d3733308e2a6e66edfd05 +SIZE (Alien-Gimp-0.10.tar.gz) = 3344 diff --git a/graphics/p5-Image-ExifTool/Makefile b/graphics/p5-Image-ExifTool/Makefile index e0c5708a152..c29e02bf72c 100644 --- a/graphics/p5-Image-ExifTool/Makefile +++ b/graphics/p5-Image-ExifTool/Makefile @@ -1,14 +1,14 @@ # Created by: Tod McQuillin PORTNAME= Image-ExifTool -PORTVERSION= 12.00 +PORTVERSION= 12.30 CATEGORIES= graphics perl5 -MASTER_SITES= https://sno.phy.queensu.ca/~phil/exiftool/ \ +MASTER_SITES= https://exiftool.org/ \ CPAN MASTER_SITE_SUBDIR= CPAN:EXIFTOOL PKGNAMEPREFIX= p5- -MAINTAINER= devin@sevenlayer.studio +MAINTAINER= devnull@apt322.org COMMENT= Perl module for getting EXIF data from image files LICENSE= ART10 GPLv1+ diff --git a/graphics/p5-Image-ExifTool/distinfo b/graphics/p5-Image-ExifTool/distinfo index bfefde2424d..72406cd7622 100644 --- a/graphics/p5-Image-ExifTool/distinfo +++ 
b/graphics/p5-Image-ExifTool/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1592880859 -SHA256 (Image-ExifTool-12.00.tar.gz) = d0792cc94ab58a8b3d81b18ccdb8b43848c8fb901b5b7caecdcb68689c6c855a -SIZE (Image-ExifTool-12.00.tar.gz) = 4817545 +TIMESTAMP = 1642710069 +SHA256 (Image-ExifTool-12.30.tar.gz) = 3be7cda70b471df589c75a4adbb71bae62e633022b0ba62585f3bcd91b35544f +SIZE (Image-ExifTool-12.30.tar.gz) = 4948601 diff --git a/graphics/p5-Image-ExifTool/pkg-descr b/graphics/p5-Image-ExifTool/pkg-descr index 6c380f812ed..a4f50778a99 100644 --- a/graphics/p5-Image-ExifTool/pkg-descr +++ b/graphics/p5-Image-ExifTool/pkg-descr @@ -1,17 +1,20 @@ -ExifTool is a highly customizable Perl script and module for reading and -writing meta information in images. +ExifTool is a platform-independent Perl library plus a command-line +application for reading, writing and editing meta information. ExifTool reads EXIF, GPS, IPTC, XMP, GeoTIFF, ICC Profile and Photoshop -IRB and ID3 meta information from JPG, JP2, TIFF, GIF, BMP, PICT, QTIF, -PNG, MNG, JNG, MIFF, PPM, PGM, PBM, XMP, EPS, PS, AI, PDF, PSD, DCM, -ACR, THM, CRW, CR2, MRW, NEF, PEF, ORF, RAF, RAW, SRF, MOS, X3F and DNG -images, MP3 and WAV audio files, and AVI, MOV and MP4 videos. ExifTool -also extracts information from the maker notes of many digital cameras -by various manufacturers including Canon, Casio, FujiFilm, JVC/Victor, -Kodak, Leaf, Minolta/Konica-Minolta, Nikon, Olympus/Epson, -Panasonic/Leica, Pentax/Asahi, Ricoh, Sanyo and Sigma/Foveon. +meta information from many image files, like ACR, AFM, AI, BMP, DCM, CR2, +CRW, DNG, DPX, GIF, EPS, JNG, JPG, MIFF, MNG, MOS, MRW, NEF, ORF, RAF, +RAW, PBM, PDF, PEF, PICT, PGM, PNG, PPM, PS, SRF, QTIF, THM, X3F, XMP. -ExifTool writes EXIF, GPS, IPTC, XMP and MakerNotes meta information to +Many audio files like AAX, AIF, APE, FLAC, LA, MP3, MPC, OFR, OPUS, PAC. +Many video files like 360, AVI, DV, F4A, FLV, LRV, M4A, MKV, MQV, RMVB. 
+ +ExifTool also extracts information from the maker notes of many digital cameras +by various manufacturers including Canon, Casio, FujiFilm, JVC/Victor, Kodak, +Leaf, Minolta/Konica-Minolta, Nikon, Olympus/Epson, Panasonic/Leica, +Pentax/Asahi, Ricoh, Sanyo and Sigma/Foveon. + +ExifTool also writes EXIF, GPS, IPTC, XMP and MakerNotes meta information to JPEG, TIFF, GIF, CRW, THM, CR2, NEF, PEF and DNG images. -WWW: https://sno.phy.queensu.ca/~phil/exiftool/ +WWW: https://exiftool.org diff --git a/graphics/p5-Image-ExifTool/pkg-plist b/graphics/p5-Image-ExifTool/pkg-plist index 0893a79662a..3ecd852b1f5 100644 --- a/graphics/p5-Image-ExifTool/pkg-plist +++ b/graphics/p5-Image-ExifTool/pkg-plist @@ -100,6 +100,7 @@ bin/exiftool %%SITE_PERL%%/Image/ExifTool/Jpeg2000.pm %%SITE_PERL%%/Image/ExifTool/Kodak.pm %%SITE_PERL%%/Image/ExifTool/KyoceraRaw.pm +%%SITE_PERL%%/Image/ExifTool/LIF.pm %%SITE_PERL%%/Image/ExifTool/LNK.pm %%SITE_PERL%%/Image/ExifTool/Lang/cs.pm %%SITE_PERL%%/Image/ExifTool/Lang/de.pm @@ -129,6 +130,7 @@ bin/exiftool %%SITE_PERL%%/Image/ExifTool/MPC.pm %%SITE_PERL%%/Image/ExifTool/MPEG.pm %%SITE_PERL%%/Image/ExifTool/MPF.pm +%%SITE_PERL%%/Image/ExifTool/MRC.pm %%SITE_PERL%%/Image/ExifTool/MWG.pm %%SITE_PERL%%/Image/ExifTool/MXF.pm %%SITE_PERL%%/Image/ExifTool/MacOS.pm @@ -141,12 +143,14 @@ bin/exiftool %%SITE_PERL%%/Image/ExifTool/Nikon.pm %%SITE_PERL%%/Image/ExifTool/NikonCapture.pm %%SITE_PERL%%/Image/ExifTool/NikonCustom.pm +%%SITE_PERL%%/Image/ExifTool/NikonSettings.pm %%SITE_PERL%%/Image/ExifTool/Nintendo.pm %%SITE_PERL%%/Image/ExifTool/OOXML.pm %%SITE_PERL%%/Image/ExifTool/Ogg.pm %%SITE_PERL%%/Image/ExifTool/Olympus.pm %%SITE_PERL%%/Image/ExifTool/OpenEXR.pm %%SITE_PERL%%/Image/ExifTool/Opus.pm +%%SITE_PERL%%/Image/ExifTool/Other.pm %%SITE_PERL%%/Image/ExifTool/PCX.pm %%SITE_PERL%%/Image/ExifTool/PDF.pm %%SITE_PERL%%/Image/ExifTool/PGF.pm @@ -215,6 +219,7 @@ bin/exiftool %%SITE_PERL%%/Image/ExifTool/XMP2.pl %%SITE_PERL%%/Image/ExifTool/XMPStruct.pl 
%%SITE_PERL%%/Image/ExifTool/ZIP.pm +%%SITE_PERL%%/Image/ExifTool/ZISRAW.pm %%SITE_PERL%%/Image/ExifTool/iWork.pm %%PERL5_MAN1%%/exiftool.1.gz %%PERL5_MAN3%%/File::RandomAccess.3.gz @@ -248,8 +253,8 @@ bin/exiftool %%PERL5_MAN3%%/Image::ExifTool::DjVu.3.gz %%PERL5_MAN3%%/Image::ExifTool::EXE.3.gz %%PERL5_MAN3%%/Image::ExifTool::Exif.3.gz -%%PERL5_MAN3%%/Image::ExifTool::FLAC.3.gz %%PERL5_MAN3%%/Image::ExifTool::FITS.3.gz +%%PERL5_MAN3%%/Image::ExifTool::FLAC.3.gz %%PERL5_MAN3%%/Image::ExifTool::FLIF.3.gz %%PERL5_MAN3%%/Image::ExifTool::FLIR.3.gz %%PERL5_MAN3%%/Image::ExifTool::Fixup.3.gz @@ -283,6 +288,7 @@ bin/exiftool %%PERL5_MAN3%%/Image::ExifTool::Jpeg2000.3.gz %%PERL5_MAN3%%/Image::ExifTool::Kodak.3.gz %%PERL5_MAN3%%/Image::ExifTool::KyoceraRaw.3.gz +%%PERL5_MAN3%%/Image::ExifTool::LIF.3.gz %%PERL5_MAN3%%/Image::ExifTool::LNK.3.gz %%PERL5_MAN3%%/Image::ExifTool::Lang::cs.3.gz %%PERL5_MAN3%%/Image::ExifTool::Lang::de.3.gz @@ -312,6 +318,7 @@ bin/exiftool %%PERL5_MAN3%%/Image::ExifTool::MPC.3.gz %%PERL5_MAN3%%/Image::ExifTool::MPEG.3.gz %%PERL5_MAN3%%/Image::ExifTool::MPF.3.gz +%%PERL5_MAN3%%/Image::ExifTool::MRC.3.gz %%PERL5_MAN3%%/Image::ExifTool::MWG.3.gz %%PERL5_MAN3%%/Image::ExifTool::MXF.3.gz %%PERL5_MAN3%%/Image::ExifTool::MacOS.3.gz @@ -324,13 +331,14 @@ bin/exiftool %%PERL5_MAN3%%/Image::ExifTool::Nikon.3.gz %%PERL5_MAN3%%/Image::ExifTool::NikonCapture.3.gz %%PERL5_MAN3%%/Image::ExifTool::NikonCustom.3.gz +%%PERL5_MAN3%%/Image::ExifTool::NikonSettings.3.gz %%PERL5_MAN3%%/Image::ExifTool::Nintendo.3.gz %%PERL5_MAN3%%/Image::ExifTool::OOXML.3.gz %%PERL5_MAN3%%/Image::ExifTool::Ogg.3.gz %%PERL5_MAN3%%/Image::ExifTool::Olympus.3.gz %%PERL5_MAN3%%/Image::ExifTool::OpenEXR.3.gz %%PERL5_MAN3%%/Image::ExifTool::Opus.3.gz -%%PERL5_MAN3%%/Image::ExifTool::Parrot.3.gz +%%PERL5_MAN3%%/Image::ExifTool::Other.3.gz %%PERL5_MAN3%%/Image::ExifTool::PCX.3.gz %%PERL5_MAN3%%/Image::ExifTool::PDF.3.gz %%PERL5_MAN3%%/Image::ExifTool::PGF.3.gz @@ -343,6 +351,7 @@ bin/exiftool 
%%PERL5_MAN3%%/Image::ExifTool::Palm.3.gz %%PERL5_MAN3%%/Image::ExifTool::Panasonic.3.gz %%PERL5_MAN3%%/Image::ExifTool::PanasonicRaw.3.gz +%%PERL5_MAN3%%/Image::ExifTool::Parrot.3.gz %%PERL5_MAN3%%/Image::ExifTool::Pentax.3.gz %%PERL5_MAN3%%/Image::ExifTool::PhaseOne.3.gz %%PERL5_MAN3%%/Image::ExifTool::PhotoCD.3.gz @@ -397,4 +406,5 @@ bin/exiftool %%PERL5_MAN3%%/Image::ExifTool::XMP2.3.gz %%PERL5_MAN3%%/Image::ExifTool::XMPStruct.3.gz %%PERL5_MAN3%%/Image::ExifTool::ZIP.3.gz +%%PERL5_MAN3%%/Image::ExifTool::ZISRAW.3.gz %%PERL5_MAN3%%/Image::ExifTool::iWork.3.gz diff --git a/graphics/photoflow/Makefile b/graphics/photoflow/Makefile index 213cfe0709d..bd814061ab2 100644 --- a/graphics/photoflow/Makefile +++ b/graphics/photoflow/Makefile @@ -1,6 +1,6 @@ PORTNAME= photoflow DISTVERSION= 0.2.8.20200828 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= graphics MAINTAINER= yuri@FreeBSD.org diff --git a/graphics/piddle/files/patch-2to3 b/graphics/piddle/files/patch-2to3 new file mode 100644 index 00000000000..2393103c210 --- /dev/null +++ b/graphics/piddle/files/patch-2to3 @@ -0,0 +1,2238 @@ +--- src/piddle/PixMapWrapper.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/PixMapWrapper.py +@@ -93,7 +93,7 @@ class PixMapWrapper: + + def __setattr__(self, attr, val): + if attr == 'baseAddr': +- raise 'UseErr', "don't assign to .baseAddr -- assign to .data instead" ++ raise('UseErr', "don't assign to .baseAddr -- assign to .data instead") + elif attr == 'data': + self.__dict__['data'] = val + self._stuff('baseAddr', id(self.data) + MacOS.string_id_to_buffer) +@@ -109,7 +109,7 @@ class PixMapWrapper: + elif attr == 'hRes' or attr == 'vRes': + # 16.16 fixed format, so just shift 16 bits + self._stuff(attr, int(val) << 16) +- elif attr in _pmElemFormat.keys(): ++ elif attr in list(_pmElemFormat.keys()): + # any other pm attribute -- just stuff + self._stuff(attr, val) + else: +@@ -129,7 +129,7 @@ class PixMapWrapper: + elif attr == 'hRes' or attr == 'vRes': + # 16.16 fixed format, 
so just shift 16 bits + return self._unstuff(attr) >> 16 +- elif attr in _pmElemFormat.keys(): ++ elif attr in list(_pmElemFormat.keys()): + # any other pm attribute -- just unstuff + return self._unstuff(attr) + else: +@@ -151,7 +151,7 @@ class PixMapWrapper: + if y2 == None: + dest[3] = y1 + src[3]-src[1] + if not port: port = Qd.GetPort() +- print "blit port:", port ++ print("blit port:", port) + Qd.CopyBits(self.PixMap(), port.portBits, src, tuple(dest), + QuickDraw.srcCopy, None) + +@@ -186,7 +186,7 @@ class PixMapWrapper: + # so convert if necessary + if format != imgformat.macrgb and format != imgformat.macrgb16: + # (LATER!) +- raise "NotImplementedError", "conversion to macrgb or macrgb16" ++ raise("NotImplementedError", "conversion to macrgb or macrgb16") + self.data = s + self.bounds = (0,0,width,height) + self.cmpCount = 3 +@@ -207,7 +207,7 @@ class PixMapWrapper: + return self.data + # otherwise, convert to the requested format + # (LATER!) +- raise "NotImplementedError", "data format conversion" ++ raise("NotImplementedError", "data format conversion") + + def fromImage(self,im): + """Initialize this PixMap from a PIL Image object.""" +--- src/piddle/aigen.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/aigen.py +@@ -344,7 +344,7 @@ class FontCache: + + def loadfont(self, fontname): + filename = AFMDIR + os.sep + fontname + '.afm' +- print 'cache loading',filename ++ print('cache loading',filename) + assert os.path.exists(filename) + widths = parseAFMfile(filename) + self.__widtharrays[fontname] = widths +@@ -357,7 +357,7 @@ class FontCache: + return self.__widtharrays[fontname] + except: + # font not found, use Courier +- print 'Font',fontname,'not found - using Courier for widths' ++ print('Font',fontname,'not found - using Courier for widths') + return self.getfont('courier') + + +@@ -369,7 +369,7 @@ class FontCache: + return w + def status(self): + #returns loaded fonts +- return self.__widtharrays.keys() ++ return list(self.__widtharrays.keys()) 
+ + TheFontCache = FontCache() + +@@ -443,8 +443,8 @@ class AIDocument: + self.transforms.originy = 0 + # self.info.boundingBox = boundingbox + lx,ly, ux,uy, tx = boundingbox +- print 'setBoundingBox', lx,ly, ux,uy, tx +- print 'setBoundingBox', ux-lx,uy-ly ++ print('setBoundingBox', lx,ly, ux,uy, tx) ++ print('setBoundingBox', ux-lx,uy-ly) + self.info.pagesize = (ux-lx), (uy-ly) + ##XXX If the ArtSize is smaller than Letter Freehand always draws the + ##XXX origin as if the Art board was Letter sized, however the arboard +@@ -459,7 +459,7 @@ class AIDocument: + # print self.transforms + # print self.transforms.originx + # print self.transforms.originy +- print 'setBoundingBox', lx,ly, ux,uy ++ print('setBoundingBox', lx,ly, ux,uy) + self.info.boundingBox = lx, ly, ux, uy + self.transforms.height = uy + +@@ -479,16 +479,16 @@ class AIDocument: + + def printXref(self): + self.startxref = sys.stdout.tell() +- print 'xref' +- print 0,len(self.objects) + 1 +- print '0000000000 65535 f' ++ print('xref') ++ print(0,len(self.objects) + 1) ++ print('0000000000 65535 f') + for pos in self.xref: +- print '%0.10d 00000 n' % pos ++ print('%0.10d 00000 n' % pos) + + def printTrailer(self): +- print '''%%PageTrailer ++ print('''%%PageTrailer + gsave annotatepage grestore showpage +-%%Trailer''' ++%%Trailer''') + # print '<< /Size %d /Root %d 0 R /Info %d 0 R>>' % (len(self.objects) + 1, 1, self.infopos) + # print 'startxref' + # print self.startxref +@@ -496,7 +496,6 @@ gsave annotatepage grestore showpage + def printAI(self): + "prints it to standard output. 
Logs positions for doing trailer" + # print "%AI-1.0" +-# print "%’“¦²" + i = 1 + self.xref = [] + # print self.objects +@@ -510,7 +509,7 @@ gsave annotatepage grestore showpage + # i = i + 1 + # self.printXref() + self.printTrailer() +- print "%%EOF", ++ print("%%EOF", end=' ') + + + def addPage(self, page): +@@ -570,10 +569,10 @@ class OutputGrabber: + def testOutputGrabber(): + gr = OutputGrabber() + for i in range(10): +- print 'line',i ++ print('line',i) + data = gr.getData() + gr.close() +- print 'Data...',data ++ print('Data...',data) + + + ############################################################## +@@ -587,7 +586,7 @@ def testOutputGrabber(): + class AIObject: + "Base class for all AI objects" + def printAI(self): +- print '% base AI object' ++ print('% base AI object') + + + class AILiteral(AIObject): +@@ -595,7 +594,7 @@ class AILiteral(AIObject): + def __init__(self, text): + self.text = text + def printAI(self): +- print self.text ++ print(self.text) + + + +@@ -608,7 +607,7 @@ class AICatalog(AIObject): + /Outlines %d 0 R + >>''' + def printAI(self): +- print self.template % (self.RefPages, self.RefOutlines) ++ print(self.template % (self.RefPages, self.RefOutlines)) + + class AIHeader(AIObject): + # no features implemented yet +@@ -622,26 +621,26 @@ class AIHeader(AIObject): + self.datestr = time.strftime("%x %I:%M %p", now) + + def printAI(self): +- print "%!PS-Adobe-3.0" +- print "%%Creator: PIDDLE Adobe Illustrator backend" +- print "%%Title: " +'(%s)' % self.title +- print "%%For: " +'(%s)' % self.author +- print "%%CreationDate: " +'(%s)' % self.datestr +- print "%%DocumentProcessColors: Black""" +- print '%%BoundingBox: ' + '%s %s %s %s' % self.boundingBox ++ print("%!PS-Adobe-3.0") ++ print("%%Creator: PIDDLE Adobe Illustrator backend") ++ print("%%Title: " +'(%s)' % self.title) ++ print("%%For: " +'(%s)' % self.author) ++ print("%%CreationDate: " +'(%s)' % self.datestr) ++ print("%%DocumentProcessColors: Black""") ++ print('%%BoundingBox: ' 
+ '%s %s %s %s' % self.boundingBox) + #%%DocumentProcessColors: Cyan Magenta Yellow + #%%DocumentCustomColors: (PANTONE 156 CV) + #%%RGBCustomColor: red green blue (customcolorname) + #%%DocumentFonts: CooperBlack + #%%+ Minion-Regular + #%%DocumentFiles: WrathOfRalph +- print "%AI5_FileFormat 3" +- print "%AI3_ColorUsage: Color" +- print '%AI5_ArtSize: ' + '%s %s' % self.pagesize +- print '%AI5_Templatebox: ' + '%s %s' % self.pagesize ++ print("%AI5_FileFormat 3") ++ print("%AI3_ColorUsage: Color") ++ print('%AI5_ArtSize: ' + '%s %s' % self.pagesize) ++ print('%AI5_Templatebox: ' + '%s %s' % self.pagesize) + #%AI7_ImageSettings: flag +- print '%AI5_TargetResolution: 300' +- print '%%EndComments' ++ print('%AI5_TargetResolution: 300') ++ print('%%EndComments') + + + class AIProlog(AIObject): +@@ -649,18 +648,18 @@ class AIProlog(AIObject): + def __init__(self): + self.FontList = [] + def printAI(self): +- print '%%BeginProlog' +- print '%%EndProlog' ++ print('%%BeginProlog') ++ print('%%EndProlog') + + class AISetUp(AIObject): + "null outline, does nothing yet" + def __init__(self): + self.FontList = [] + def printAI(self): +- print '%%BeginSetup' ++ print('%%BeginSetup') + if self.FontList: + pass +- print '%%EndSetup' ++ print('%%EndSetup') + + class AIPageCollection(AIObject): + "presumes PageList attribute set (list of integers)" +@@ -671,7 +670,7 @@ class AIPageCollection(AIObject): + for page in self.PageList: + result = result + str(page) + ' 0 R ' + result = result + ']\n>>' +- print result ++ print(result) + + #class AIBody(AIObject): + # """The Bastard. Needs list of Resources etc. Use a standard one for now. 
+@@ -776,14 +775,14 @@ class AIStream(AIObject): + + + # print '<< /Length %d >>' % length +- print '''%AI5_BeginLayer ++ print('''%AI5_BeginLayer + 1 1 1 1 0 0 0 79 128 255 Lb +-(Foreground) Ln''' +- print self.transformAI(self.originx, self.originy, self.height) ++(Foreground) Ln''') ++ print(self.transformAI(self.originx, self.originy, self.height)) + + # print 'XXXX', self.data +- print '''LB +-%AI5_EndLayer--''' ++ print('''LB ++%AI5_EndLayer--''') + + def transformAI(self, ox, oy, ty): + # print 'transformAI', ox, oy +@@ -806,7 +805,7 @@ class AIStream(AIObject): + + class AIImage(AIObject): + def printAI(self): +- print """<< ++ print("""<< + /Type /XObject + /Subtype /Image + /Name /Im0 +@@ -823,7 +822,7 @@ stream + B2BBC2 BB6F84 31BFC2 18EA3C 0E3E00 07FC00 03F800 + 1E1800 1FF800> + endstream +-endobj""" ++endobj""") + + class AIType1Font(AIObject): + def __init__(self, key, font): +@@ -837,11 +836,11 @@ class AIType1Font(AIObject): + /Encoding /WinAnsiEncoding + >>""" + def printAI(self): +- print self.template % (self.keyname, self.fontname) ++ print(self.template % (self.keyname, self.fontname)) + + class AIProcSet(AIObject): + def printAI(self): +- print "[/AI /Text]" ++ print("[/AI /Text]") + + + +@@ -876,4 +875,4 @@ def MakeFontDictionary(startpos, count): + + + #if __name__ == '__main__': +-# print 'For test scripts, run test1.py to test7.py' +\ No newline at end of file ++# print 'For test scripts, run test1.py to test7.py' +--- src/piddle/discipline.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/discipline.py +@@ -4,28 +4,28 @@ from piddle import * + def checkMethods(parentMethod, childMethod): + "Make sure the child's method obey's the parent's interface; return 1 if OK." 
+ # get the parameter names +- pf = parentMethod.func_code +- cf = childMethod.func_code ++ pf = parentMethod.__code__ ++ cf = childMethod.__code__ + pargs = pf.co_varnames[:pf.co_argcount] + cargs = cf.co_varnames[:cf.co_argcount] + + # make sure they match, at least as far as the parent's go + if len(cargs) < len(pargs): +- print "too few args" ++ print("too few args") + return 0 + for i in range(len(pargs)): + if pargs[i] != cargs[i]: +- print "arg names don't match" ++ print("arg names don't match") + return 0 + + # if child has any additional arguments, make sure + # they have default values + extras = len(cargs) - len(pargs) +- defs = childMethod.func_defaults ++ defs = childMethod.__defaults__ + if extras and (defs is None or len(defs) < extras): +- print "need %s defaults, got %s" % (extras, defs) +- print cargs +- print pargs ++ print("need %s defaults, got %s" % (extras, defs)) ++ print(cargs) ++ print(pargs) + return 0 + + # otherwise, it's OK +@@ -41,17 +41,17 @@ def checkClasses(parent, child): + if type(item) != MethodType or name[0] == '_': + pass # print " %s is not a public method" % name + elif name in parentDir: +- if not checkMethods(getattr(parent, name).im_func, item.im_func): +- print "NAUGHTY CHILD disobeys arguments to", name ++ if not checkMethods(getattr(parent, name).__func__, item.__func__): ++ print("NAUGHTY CHILD disobeys arguments to", name) + else: +- print " %s looks OK" % name ++ print(" %s looks OK" % name) + else: +- print " %s is unique to the child" % name ++ print(" %s is unique to the child" % name) + +-foo = raw_input("backend to check (e.g., PDF):") ++foo = input("backend to check (e.g., PDF):") + if foo: + canvasname = foo+"Canvas" + module = __import__("piddle"+foo, globals(), locals(), [canvasname] ) + child = getattr(module, canvasname) +- print "\nChecking %s...\n" % canvasname ++ print("\nChecking %s...\n" % canvasname) + checkClasses( Canvas, child ) +--- src/piddle/pdfdoc.py.orig 2002-06-03 13:46:30 UTC ++++ 
src/piddle/pdfdoc.py +@@ -18,7 +18,7 @@ import sys + import string + import time + import tempfile +-import cStringIO ++import io + from types import * + from math import sin, cos, pi, ceil + +@@ -152,10 +152,10 @@ class PDFDocument: + + + def printTrailer(self): +- print 'trailer' +- print '<< /Size %d /Root %d 0 R /Info %d 0 R>>' % (len(self.objects) + 1, 1, self.infopos) +- print 'startxref' +- print self.startxref ++ print('trailer') ++ print(('<< /Size %d /Root %d 0 R /Info %d 0 R>>' % (len(self.objects) + 1, 1, self.infopos))) ++ print('startxref') ++ print((self.startxref)) + + def writeTrailer(self, f): + f.write('trailer' + LINEEND) +@@ -176,7 +176,6 @@ class PDFDocument: + i = 1 + self.xref = [] + f.write("%PDF-1.2" + LINEEND) # for CID support +- f.write("%íì¶¾" + LINEEND) + for obj in self.objects: + pos = f.tell() + self.xref.append(pos) +@@ -201,20 +200,19 @@ class PDFDocument: + + def printPDF(self): + "prints it to standard output. Logs positions for doing trailer" +- print "%PDF-1.0" +- print "%íì¶¾" ++ print("%PDF-1.0") + i = 1 + self.xref = [] + for obj in self.objects: + pos = sys.stdout.tell() + self.xref.append(pos) +- print i, '0 obj' ++ print(i, '0 obj') + obj.printPDF() +- print 'endobj' ++ print('endobj') + i = i + 1 + self.printXref() + self.printTrailer() +- print "%%EOF", ++ print("%%EOF", end=' ') + + def addPage(self, page): + """adds page and stream at end. 
Maintains pages list""" +@@ -235,16 +233,16 @@ class PDFDocument: + #self.objects.append(page.stream) + + def hasFont(self, psfontname): +- return self.fontMapping.has_key(psfontname) ++ return psfontname in self.fontMapping + + def getInternalFontName(self, psfontname): + try: + return self.fontMapping[psfontname] + except: +- raise PDFError, "Font %s not available in document" % psfontname ++ raise PDFError("Font %s not available in document" % psfontname) + + def getAvailableFonts(self): +- fontnames = self.fontMapping.keys() ++ fontnames = list(self.fontMapping.keys()) + fontnames.sort() + return fontnames + +@@ -284,10 +282,10 @@ class OutputGrabber: + def testOutputGrabber(): + gr = OutputGrabber() + for i in range(10): +- print 'line',i ++ print('line',i) + data = gr.getData() + gr.close() +- print 'Data...',data ++ print('Data...',data) + + + ############################################################## +@@ -562,4 +560,4 @@ def MakeFontDictionary(startpos, count): + return dict + + if __name__ == '__main__': +- print 'For test scripts, run test1.py to test6.py' ++ print('For test scripts, run test1.py to test6.py') +--- src/piddle/pdfgen.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/pdfgen.py +@@ -53,7 +53,7 @@ import sys + import string + import time + import tempfile +-import cStringIO ++import io + from types import * + from math import sin, cos, tan, pi, ceil + +@@ -570,12 +570,12 @@ class Canvas: + try: + import Image + except ImportError: +- print 'Python Imaging Library not available' ++ print('Python Imaging Library not available') + return + try: + import zlib + except ImportError: +- print 'zlib not available' ++ print('zlib not available') + return + + self._currentPageHasImages = 1 +@@ -605,9 +605,9 @@ class Canvas: + #write in blocks of (??) 
60 characters per line to a list + compressed = imageFile.read() + encoded = pdfutils._AsciiBase85Encode(compressed) +- outstream = cStringIO.StringIO(encoded) ++ outstream = io.StringIO(encoded) + dataline = outstream.read(60) +- while dataline <> "": ++ while dataline != "": + imagedata.append(dataline) + dataline = outstream.read(60) + imagedata.append('EI') +@@ -618,7 +618,7 @@ class Canvas: + cachedname = os.path.splitext(image)[0] + '.a85' + imagedata = open(cachedname,'rb').readlines() + #trim off newlines... +- imagedata = map(string.strip, imagedata) ++ imagedata = list(map(string.strip, imagedata)) + + #parse line two for width, height + words = string.split(imagedata[1]) +@@ -643,9 +643,9 @@ class Canvas: + encoded = pdfutils._AsciiBase85Encode(compressed) #...sadly this isn't + + #write in blocks of (??) 60 characters per line to a list +- outstream = cStringIO.StringIO(encoded) ++ outstream = io.StringIO(encoded) + dataline = outstream.read(60) +- while dataline <> "": ++ while dataline != "": + imagedata.append(dataline) + dataline = outstream.read(60) + imagedata.append('EI') +@@ -680,48 +680,48 @@ class Canvas: + # This is based on Thomas Merz's code from GhostScript (viewjpeg.ps) + def readJPEGInfo(self, image): + "Read width, height and number of components from JPEG file" +- import struct ++ import struct + +- #Acceptable JPEG Markers: +- # SROF0=baseline, SOF1=extended sequential or SOF2=progressive +- validMarkers = [0xC0, 0xC1, 0xC2] ++ #Acceptable JPEG Markers: ++ # SROF0=baseline, SOF1=extended sequential or SOF2=progressive ++ validMarkers = [0xC0, 0xC1, 0xC2] + +- #JPEG markers without additional parameters +- noParamMarkers = \ +- [ 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, 0xD8, 0x01 ] ++ #JPEG markers without additional parameters ++ noParamMarkers = \ ++ [ 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, 0xD8, 0x01 ] + +- #Unsupported JPEG Markers +- unsupportedMarkers = \ +- [ 0xC3, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xCB, 0xCD, 
0xCE, 0xCF ] ++ #Unsupported JPEG Markers ++ unsupportedMarkers = \ ++ [ 0xC3, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xCB, 0xCD, 0xCE, 0xCF ] + +- #read JPEG marker segments until we find SOFn marker or EOF +- done = 0 +- while not done: +- x = struct.unpack('B', image.read(1)) +- if x[0] == 0xFF: #found marker +- x = struct.unpack('B', image.read(1)) +- #print "Marker: ", '%0.2x' % x[0] +- #check marker type is acceptable and process it +- if x[0] in validMarkers: +- image.seek(2, 1) #skip segment length +- x = struct.unpack('B', image.read(1)) #data precision +- if x[0] != 8: +- raise 'PDFError', ' JPEG must have 8 bits per component' +- y = struct.unpack('BB', image.read(2)) +- height = (y[0] << 8) + y[1] +- y = struct.unpack('BB', image.read(2)) +- width = (y[0] << 8) + y[1] +- y = struct.unpack('B', image.read(1)) +- color = y[0] +- return width, height, color +- done = 1 +- elif x[0] in unsupportedMarkers: +- raise 'PDFError', ' Unsupported JPEG marker: %0.2x' % x[0] +- elif x[0] not in noParamMarkers: +- #skip segments with parameters +- #read length and skip the data +- x = struct.unpack('BB', image.read(2)) +- image.seek( (x[0] << 8) + x[1] - 2, 1) ++ #read JPEG marker segments until we find SOFn marker or EOF ++ done = 0 ++ while not done: ++ x = struct.unpack('B', image.read(1)) ++ if x[0] == 0xFF: #found marker ++ x = struct.unpack('B', image.read(1)) ++ #print "Marker: ", '%0.2x' % x[0] ++ #check marker type is acceptable and process it ++ if x[0] in validMarkers: ++ image.seek(2, 1) #skip segment length ++ x = struct.unpack('B', image.read(1)) #data precision ++ if x[0] != 8: ++ raise('PDFError', ' JPEG must have 8 bits per component') ++ y = struct.unpack('BB', image.read(2)) ++ height = (y[0] << 8) + y[1] ++ y = struct.unpack('BB', image.read(2)) ++ width = (y[0] << 8) + y[1] ++ y = struct.unpack('B', image.read(1)) ++ color = y[0] ++ return width, height, color ++ done = 1 ++ elif x[0] in unsupportedMarkers: ++ raise('PDFError', ' Unsupported JPEG 
marker: %0.2x' % x[0]) ++ elif x[0] not in noParamMarkers: ++ #skip segments with parameters ++ #read length and skip the data ++ x = struct.unpack('BB', image.read(2)) ++ image.seek((x[0] << 8) + x[1] - 2, 1) + + def setPageCompression(self, onoff=1): + """Possible values 1 or 0 (1 for 'on' is the default). +@@ -763,17 +763,17 @@ class Canvas: + if direction in [0,90,180,270]: + direction_arg = '/Di /%d' % direction + else: +- raise 'PDFError', ' directions allowed are 0,90,180,270' ++ raise('PDFError', ' directions allowed are 0,90,180,270') + + if dimension in ['H', 'V']: + dimension_arg = '/Dm /%s' % dimension + else: +- raise'PDFError','dimension values allowed are H and V' ++ raise('PDFError','dimension values allowed are H and V') + + if motion in ['I','O']: + motion_arg = '/M /%s' % motion + else: +- raise'PDFError','motion values allowed are I and O' ++ raise('PDFError','motion values allowed are I and O') + + + # this says which effects require which argument types from above +@@ -789,7 +789,7 @@ class Canvas: + try: + args = PageTransitionEffects[effectname] + except KeyError: +- raise 'PDFError', 'Unknown Effect Name "%s"' % effectname ++ raise('PDFError', 'Unknown Effect Name "%s"' % effectname) + self._pageTransitionString = '' + return + +@@ -1033,7 +1033,7 @@ class PDFTextObject: + if type(stuff) == StringType: + lines = string.split(string.strip(stuff), '\n') + if trim==1: +- lines = map(string.strip,lines) ++ lines = list(map(string.strip,lines)) + elif type(stuff) == ListType: + lines = stuff + elif type(stuff) == TupleType: +@@ -1055,4 +1055,4 @@ class PDFTextObject: + + + if __name__ == '__main__': +- print 'For test scripts, run testpdfgen.py' ++ print('For test scripts, run testpdfgen.py') +--- src/piddle/pdfgeom.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/pdfgeom.py +@@ -10,7 +10,7 @@ So far, just Robert Kern's bezierArc. 
+ from math import sin, cos, pi, ceil + + def bezierArc(x1,y1, x2,y2, startAng=0, extent=90): +- """bezierArc(x1,y1, x2,y2, startAng=0, extent=90) --> List of BÈzier ++ """bezierArc(x1,y1, x2,y2, startAng=0, extent=90) --> List of Bezier + curve control points. + + (x1, y1) and (x2, y2) are the corners of the enclosing rectangle. The +@@ -22,7 +22,7 @@ semi-circle. + + The resulting coordinates are of the form (x1,y1, x2,y2, x3,y3, x4,y4) + such that the curve goes from (x1, y1) to (x4, y4) with (x2, y2) and +-(x3, y3) as their respective BÈzier control points.""" ++(x3, y3) as their respective Bezier control points.""" + + x1,y1, x2,y2 = min(x1,x2), max(y1,y2), max(x1,x2), min(y1,y2) + +--- src/piddle/pdfmetrics.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/pdfmetrics.py +@@ -93,7 +93,7 @@ class FontCache: + + def loadfont(self, fontname): + filename = AFMDIR + os.sep + fontname + '.afm' +- print 'cache loading',filename ++ print('cache loading',filename) + assert os.path.exists(filename) + widths = parseAFMfile(filename) + self.__widtharrays[fontname] = widths +@@ -107,7 +107,7 @@ class FontCache: + return self.__widtharrays[fontname] + except: + # font not found, use Courier +- print 'Font',fontname,'not found - using Courier for widths' ++ print('Font',fontname,'not found - using Courier for widths') + return self.getfont('courier') + + +@@ -120,7 +120,7 @@ class FontCache: + + def status(self): + #returns loaded fonts +- return self.__widtharrays.keys() ++ return list(self.__widtharrays.keys()) + + TheFontCache = FontCache() + +--- src/piddle/pdfutils.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/pdfutils.py +@@ -3,7 +3,7 @@ + + import os + import string +-import cStringIO ++import io + + LINEEND = '\015\012' + +@@ -32,9 +32,9 @@ def cacheImageFile(filename): + encoded = _AsciiBase85Encode(compressed) #...sadly this isn't + + #write in blocks of 60 characters per line +- outstream = cStringIO.StringIO(encoded) ++ outstream = io.StringIO(encoded) + dataline 
= outstream.read(60) +- while dataline <> "": ++ while dataline != "": + code.append(dataline) + dataline = outstream.read(60) + +@@ -45,7 +45,7 @@ def cacheImageFile(filename): + f = open(cachedname,'wb') + f.write(string.join(code, LINEEND)+LINEEND) + f.close() +- print 'cached image as %s' % cachedname ++ print('cached image as %s' % cachedname) + + + def preProcessImages(spec): +@@ -54,14 +54,14 @@ def preProcessImages(spec): + to save huge amounts of time when repeatedly building image + documents.""" + import types +- if type(spec) is types.StringType: ++ if type(spec) is bytes: + filelist = glob.glob(spec) + else: #list or tuple OK + filelist = spec + + for filename in filelist: + if cachedImageExists(filename): +- print 'cached version of %s already exists' % filename ++ print('cached version of %s already exists' % filename) + else: + cacheImageFile(filename) + +@@ -111,7 +111,7 @@ def _AsciiHexEncode(input): + """This is a verbose encoding used for binary data within + a PDF file. 
One byte binary becomes two bytes of ASCII.""" + "Helper function used by images" +- output = cStringIO.StringIO() ++ output = io.StringIO() + for char in input: + output.write('%02x' % ord(char)) + output.write('>') +@@ -126,7 +126,7 @@ def _AsciiHexDecode(input): + stripped = stripped[:-1] #chop off terminator + assert len(stripped) % 2 == 0, 'Ascii Hex stream has odd number of bytes' + i = 0 +- output = cStringIO.StringIO() ++ output = io.StringIO() + while i < len(stripped): + twobytes = stripped[i:i+2] + output.write(chr(eval('0x'+twobytes))) +@@ -136,21 +136,21 @@ def _AsciiHexDecode(input): + + def _AsciiHexTest(text='What is the average velocity of a sparrow?'): + "Do the obvious test for whether Ascii Hex encoding works" +- print 'Plain text:', text ++ print('Plain text:', text) + encoded = _AsciiHexEncode(text) +- print 'Encoded:', encoded ++ print('Encoded:', encoded) + decoded = _AsciiHexDecode(encoded) +- print 'Decoded:', decoded ++ print('Decoded:', decoded) + if decoded == text: +- print 'Passed' ++ print('Passed') + else: +- print 'Failed!' ++ print('Failed!') + + def _AsciiBase85Encode(input): + """This is a compact encoding used for binary data within + a PDF file. Four bytes of binary data become five bytes of + ASCII. This is the default method used for encoding images.""" +- outstream = cStringIO.StringIO() ++ outstream = io.StringIO() + # special rules apply if not a multiple of four bytes. 
+ whole_word_count, remainder_size = divmod(len(input), 4) + cut = 4 * whole_word_count +@@ -163,7 +163,7 @@ def _AsciiBase85Encode(input): + b3 = ord(body[offset+2]) + b4 = ord(body[offset+3]) + +- num = 16777216L * b1 + 65536 * b2 + 256 * b3 + b4 ++ num = 16777216 * b1 + 65536 * b2 + 256 * b3 + b4 + + if num == 0: + #special case +@@ -194,7 +194,7 @@ def _AsciiBase85Encode(input): + b3 = ord(lastbit[2]) + b4 = ord(lastbit[3]) + +- num = 16777216L * b1 + 65536 * b2 + 256 * b3 + b4 ++ num = 16777216 * b1 + 65536 * b2 + 256 * b3 + b4 + + #solve for c1..c5 + temp, c5 = divmod(num, 85) +@@ -217,7 +217,7 @@ def _AsciiBase85Encode(input): + def _AsciiBase85Decode(input): + """This is not used - Acrobat Reader decodes for you - but a round + trip is essential for testing.""" +- outstream = cStringIO.StringIO() ++ outstream = io.StringIO() + #strip all whitespace + stripped = string.join(string.split(input),'') + #check end +@@ -229,7 +229,7 @@ def _AsciiBase85Decode(input): + # special rules apply if not a multiple of five bytes. + whole_word_count, remainder_size = divmod(len(stripped), 5) + #print '%d words, %d leftover' % (whole_word_count, remainder_size) +- assert remainder_size <> 1, 'invalid Ascii 85 stream!' ++ assert remainder_size != 1, 'invalid Ascii 85 stream!' + cut = 5 * whole_word_count + body, lastbit = stripped[0:cut], stripped[cut:] + +@@ -301,14 +301,14 @@ def _wrap(input, columns=60): + + def _AsciiBase85Test(text='What is the average velocity of a sparrow?'): + "Do the obvious test for whether Base 85 encoding works" +- print 'Plain text:', text ++ print('Plain text:', text) + encoded = _AsciiBase85Encode(text) +- print 'Encoded:', encoded ++ print('Encoded:', encoded) + decoded = _AsciiBase85Decode(encoded) +- print 'Decoded:', decoded ++ print('Decoded:', decoded) + if decoded == text: +- print 'Passed' ++ print('Passed') + else: +- print 'Failed!' 
++ print('Failed!') + + +--- src/piddle/piddle.py.orig 2002-06-03 13:48:50 UTC ++++ src/piddle/piddle.py +@@ -143,7 +143,7 @@ class Color: + d["blue"] = _float(blue) + + def __setattr__(self, name, value): +- raise TypeError, "piddle.Color has read-only attributes" ++ raise TypeError("piddle.Color has read-only attributes") + + def __mul__(self,x): + return Color(self.red*x, self.green*x, self.blue*x) +@@ -369,7 +369,7 @@ class Font: + self.underline, repr(self.face)) + + def __setattr__(self, name, value): +- raise TypeError, "piddle.Font has read-only attributes" ++ raise TypeError("piddle.Font has read-only attributes") + + + #------------------------------------------------------------------------- +@@ -453,25 +453,23 @@ class Canvas: + but which might be buffered should be flushed to the screen" + pass + +- def save(self, file=None, format=None): ++ def save(self, file=None, format=None): + +- """For backends that can be save to a file or sent to a +- stream, create a valid file out of what's currently been +- drawn on the canvas. Trigger any finalization here. +- Though some backends may allow further drawing after this call, +- presume that this is not possible for maximum portability ++ """For backends that can be save to a file or sent to a ++ stream, create a valid file out of what's currently been ++ drawn on the canvas. Trigger any finalization here. 
++ Though some backends may allow further drawing after this call, ++ presume that this is not possible for maximum portability + +- file may be either a string or a file object with a write method +- if left as the default, the canvas's current name will be used ++ file may be either a string or a file object with a write method ++ if left as the default, the canvas's current name will be used + +- format may be used to specify the type of file format to use as +- well as any corresponding extension to use for the filename +- This is an optional argument and backends may ignore it if +- they only produce one file format.""" +- pass ++ format may be used to specify the type of file format to use as ++ well as any corresponding extension to use for the filename ++ This is an optional argument and backends may ignore it if ++ they only produce one file format.""" ++ pass + +- +- + def setInfoLine(self, s): + "For interactive Canvases, displays the given string in the \ + 'info line' somewhere where the user can probably see it." +@@ -481,7 +479,7 @@ class Canvas: + def stringWidth(self, s, font=None): + "Return the logical width of the string if it were drawn \ + in the current font (defaults to self.font)." +- raise NotImplementedError, 'stringWidth' ++ raise NotImplementedError('stringWidth') + + def fontHeight(self, font=None): + "Find the height of one line of text (baseline to baseline) of the given font." +@@ -492,11 +490,11 @@ class Canvas: + + def fontAscent(self, font=None): + "Find the ascent (height above base) of the given font." +- raise NotImplementedError, 'fontAscent' ++ raise NotImplementedError('fontAscent') + + def fontDescent(self, font=None): + "Find the descent (extent below base) of the given font." 
+- raise NotImplementedError, 'fontDescent' ++ raise NotImplementedError('fontDescent') + + #------------- drawing helpers -------------- + +@@ -602,7 +600,7 @@ class Canvas: + + def drawLine(self, x1,y1, x2,y2, color=None, width=None): + "Draw a straight line between x1,y1 and x2,y2." +- raise NotImplementedError, 'drawLine' ++ raise NotImplementedError('drawLine') + + def drawLines(self, lineList, color=None, width=None): + "Draw a set of lines of uniform color and width. \ +@@ -617,7 +615,7 @@ class Canvas: + def drawString(self, s, x,y, font=None, color=None, angle=0): + "Draw a string starting at location x,y." + # NOTE: the baseline goes on y; drawing covers (y-ascent,y+descent) +- raise NotImplementedError, 'drawString' ++ raise NotImplementedError('drawString') + + + # For fillable shapes, edgeColor defaults to self.defaultLineColor, +@@ -627,7 +625,7 @@ class Canvas: + + def drawCurve(self, x1,y1, x2,y2, x3,y3, x4,y4, + edgeColor=None, edgeWidth=None, fillColor=None, closed=0): +- "Draw a Bézier curve with control points x1,y1 to x4,y4." ++ "Draw a Bezier curve with control points x1,y1 to x4,y4." 
+ + pointlist = self.curvePoints(x1, y1, x2, y2, x3, y3, x4, y4) + self.drawPolygon(pointlist, +@@ -697,7 +695,7 @@ class Canvas: + pointlist: a list of (x,y) tuples defining vertices + closed: if 1, adds an extra segment connecting the last point to the first + """ +- raise NotImplementedError, 'drawPolygon' ++ raise NotImplementedError('drawPolygon') + + def drawFigure(self, partList, + edgeColor=None, edgeWidth=None, fillColor=None, closed=0): +@@ -719,7 +717,7 @@ class Canvas: + elif op == figureCurve: + pointList.extend(apply(self.curvePoints,args)) + else: +- raise TypeError, "unknown figure operator: "+op ++ raise TypeError("unknown figure operator: "+op) + + self.drawPolygon(pointList, edgeColor, edgeWidth, fillColor, closed=closed) + +@@ -729,7 +727,7 @@ class Canvas: + def drawImage(self, image, x1,y1, x2=None,y2=None): + """Draw a PIL Image into the specified rectangle. If x2 and y2 are + omitted, they are calculated from the image size.""" +- raise NotImplementedError, 'drawImage' ++ raise NotImplementedError('drawImage') + + + +@@ -748,9 +746,9 @@ def getFileObject(file): + if hasattr(file, "write"): + fileobj = file + else: +- raise 'Invalid file argument to save' ++ raise ValueError('Invalid file argument to save') + else: +- raise 'Invalid file argument to save' ++ raise ValueError('Invalid file argument to save') + + return fileobj + +--- src/piddle/piddleAI.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleAI.py +@@ -19,7 +19,7 @@ from piddle import * + import aigen + import string + import zlib +-import cStringIO ++import io + + from math import sin, cos, pi, ceil + +@@ -35,7 +35,7 @@ class AICanvas(Canvas): + + def __init__(self, size=(0,0), name='piddle.ai'): + Canvas.__init__(self, size, name=name) +- print name ++ print(name) + if name[-3:] == '.ai': + self.name = name + else: +@@ -91,7 +91,7 @@ class AICanvas(Canvas): + self.setBoundingBox() + stream = self.winding + '\n' + self.winOrder + stream = [stream] + self.code +- self.doc.setPage(stream) ++ 
self.doc.setPage(stream) + + def save(self, file=None, format=None): + """Saves the file. If holding data, do +@@ -99,7 +99,7 @@ class AICanvas(Canvas): + if len(self.code): + self.showPage() + self.doc.SaveToFile(self.name) +- print 'saved', self.name ++ print('saved', self.name) + + + +@@ -428,7 +428,7 @@ class AICanvas(Canvas): + sl.append('l') + # print sl + # print sk +- if i <> 0: ++ if i != 0: + self.code.append(tuple(sl)) + self.code.append(tuple(sk)) + +@@ -474,10 +474,10 @@ class AICanvas(Canvas): + edgeWidth=None, fillColor=None, closed=0): + start = pointlist[0] + pointlist = pointlist[1:] +- x1 = min(map(lambda (x,y) : x, pointlist)) +- x2 = max(map(lambda (x,y) : x, pointlist)) +- y1 = min(map(lambda (x,y) : y, pointlist)) +- y2 = max(map(lambda (x,y) : y, pointlist)) ++ x1 = min([x_y[0] for x_y in pointlist]) ++ x2 = max([x_y1[0] for x_y1 in pointlist]) ++ y1 = min([x_y2[1] for x_y2 in pointlist]) ++ y2 = max([x_y3[1] for x_y3 in pointlist]) + self._updateFillColor(fillColor) + self._updateLineWidth(edgeWidth) + self._updateLineColor(edgeColor) +@@ -505,7 +505,7 @@ class AICanvas(Canvas): + + + def drawString(): +- print "Sorry Not yet impemented" ++ print("Sorry Not yet impemented") + + + +--- src/piddle/piddleFIG.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleFIG.py +@@ -363,7 +363,7 @@ class FIGCanvas(Canvas): + + def clear(self): + """Reset canvas to its default state.""" +- raise NotImplementedError, "clear" ++ raise NotImplementedError("clear") + + def save(self, file=None, format=None): + """Write the current document to a file or stream and close the file. 
+@@ -472,7 +472,7 @@ class FIGCanvas(Canvas): + } + self.code.append(code) + line = [] +- pointlist = map(lambda p, f=bp: (p[0]*bp, p[1]*bp), pointlist) ++ pointlist = list(map(lambda p, f=bp: (p[0]*bp, p[1]*bp), pointlist)) + for coords in pointlist: + code = self.coord_fmt % coords + line.append(code) +@@ -537,7 +537,7 @@ class FIGCanvas(Canvas): + self.code.append(code) + code = [] + pointlist = [(x1, y1), (x1, y2), (x2, y2), (x2, y1), (x1, y1)] +- pointlist = map(lambda p, f=bp: (p[0]*bp, p[1]*bp), pointlist) ++ pointlist = list(map(lambda p, f=bp: (p[0]*bp, p[1]*bp), pointlist)) + for coords in pointlist: + code.append(self.coord_fmt % coords) + code = " ".join(code) +--- src/piddle/piddleGL.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleGL.py +@@ -61,7 +61,7 @@ try: + def vertexCB(self, O): + glVertex2d(O[0], O[1]) + def combineCB(self, p1, p2, p3): +- print len(p3) ++ print(len(p3)) + return p3[0][-1] + def edgeFlagCB(self, *args): + pass +@@ -149,7 +149,7 @@ class _GLCanvas(Canvas): + glCallList(list) + else: + func, args, kw = list +- apply(func, args, kw) ++ func(*args, **kw) + glFlush() + + def drawLine(self, x1,y1, x2,y2, color=None, width=None): +@@ -274,7 +274,7 @@ class _GLCanvas(Canvas): + try: + import Image + except ImportError: +- raise ImportError, 'Saving to a non-PPM format is not available because PIL is not installed' ++ raise ImportError('Saving to a non-PPM format is not available because PIL is not installed') + savefname = base+'.ppm' + glSavePPM(savefname, self._width, self._height) + i = Image.open(savefname) +@@ -357,7 +357,7 @@ def getGLTTFontWrapper(): + if face is None: face = 'arial' + face = string.lower(face) + self.face = face +- if self.maps.has_key(face): ++ if face in self.maps: + face = self.maps[face] + if bold: + if italic: +@@ -421,16 +421,16 @@ try: + pass + + def keyboard(*args): +- print args ++ print(args) + + def mainloop(self): + glutMainLoop() +- if _debug: print "# GlutCanvas available" ++ if _debug: 
print("# GlutCanvas available") + except NameError: + pass + + try: +- import Tkinter ++ import tkinter + from OpenGL.Tk import RawOpengl + class TkInteractive: + def __init__(self): +@@ -463,7 +463,7 @@ try: + 'height':height}) + self._width = width + self._height = height +- apply(RawOpengl.__init__, (self,), kw) ++ RawOpengl.__init__(*(self,), **kw) + _GLCanvas.__init__(self, size=size, name=name) + TkInteractive.__init__(self) + self.bind('', self.resize) +@@ -474,7 +474,7 @@ try: + self.configure(width=w, height=h) + self._width = w + self._height= h +- Tkinter.Frame.configure(self) ++ tkinter.Frame.configure(self) + + def redraw(self): + if self._inList: self._saveList() +@@ -489,7 +489,7 @@ try: + + def setInfoLine(self, s): + pass +- if _debug: print "# ToglCanvas available" ++ if _debug: print("# ToglCanvas available") + except ImportError: + pass + +@@ -498,7 +498,7 @@ try: + except NameError: + GLCanvas = GlutCanvas + except NameError: +- raise ImportError, "Couldn't get either GLUT or Togl loaded" ++ raise ImportError("Couldn't get either GLUT or Togl loaded") + + def getGLUTFontWrapper(): + class GLUTFontWrapper: +@@ -516,7 +516,7 @@ def getGLUTFontWrapper(): + self.size=font.size + if face is None: face = 'glutStrokeRomanFixed' + face = string.lower(face) +- if self.maps.has_key(face): ++ if face in self.maps: + face = self.maps[face] + self.glutface = face + def stringWidth(self, s): +@@ -562,6 +562,6 @@ except ImportError: + + if _debug: + if FontSupport == 0: +- print "# Can't find font support" ++ print("# Can't find font support") + else: +- print "# Using fonts from:", FontWrapper.__name__ ++ print("# Using fonts from:", FontWrapper.__name__) +--- src/piddle/piddleGTK/core.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleGTK/core.py +@@ -98,7 +98,7 @@ class BasicCanvas(piddle.Canvas): + # and make sure the canvas is big enough: + iwidth = iheight = 0 + for i in range(len(lineList)): +- x1, y1, x2, y2 = map(int, map(round, lineList[i])) ++ 
x1, y1, x2, y2 = list(map(int, list(map(round, lineList[i])))) + iwidth = max(iwidth, x1, x2) + iheight = max(iheight, y1, y2) + # +@@ -120,7 +120,7 @@ class BasicCanvas(piddle.Canvas): + return + angle = int(round(angle)) + if angle != 0: +- raise NotImplementedError, "rotated text not implemented" ++ raise NotImplementedError("rotated text not implemented") + if font is None: + font = self.defaultFont + lines = string.split(s, "\n") +@@ -163,7 +163,7 @@ class BasicCanvas(piddle.Canvas): + def drawPolygon(self, pointlist, edgeColor=None, edgeWidth=None, + fillColor=None, closed=0): + if len(pointlist) < 3: +- raise ValueError, "too few points in the point list" ++ raise ValueError("too few points in the point list") + # XXX lots more should be checked + if edgeColor is None: + edgeColor = self.defaultLineColor +@@ -227,7 +227,7 @@ class BasicCanvas(piddle.Canvas): + def ensure_size(self, width, height): + # like __ensure_size(), but doesn't return buffer + if (width <= 0) or (height <= 0): +- raise ValueError, "width and height must both be positive" ++ raise ValueError("width and height must both be positive") + self.__ensure_size(width, height) + + +@@ -303,7 +303,7 @@ def _font_to_gdkfont(font): + try: + return _xlfd_to_gdkfont(xlfd) + except RuntimeError: +- print "failed to load", xlfd ++ print("failed to load", xlfd) + raise + + +--- src/piddle/piddleGTK/tests.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleGTK/tests.py +@@ -36,8 +36,8 @@ def main(): + top.add(bbox) + top.connect("destroy", gtk.mainquit) + top.connect("delete_event", gtk.mainquit) +- tests = map((lambda test: (string.capitalize(test.__name__), test)), +- piddletest.tests) ++ tests = list(map((lambda test: (string.capitalize(test.__name__), test)), ++ piddletest.tests)) + tests.extend(testitems) + for name, test in tests: + b = gtk.GtkButton(name) +@@ -118,7 +118,7 @@ def font_mapping(canvasClass): + key = piddleGTK.core._font_to_key(font) + xlfd = piddleGTK.core._fontkey_to_xlfd(key) + 
f.write("%s\n" % font) +- f.write(" %s\n" % `key`) ++ f.write(" %s\n" % repr(key)) + f.write(" %s\n\n" % xlfd) + # just have to have a .flush() method: + return f +@@ -143,7 +143,7 @@ def onOver(canvas, x, y): + canvas.setInfoLine("onOver(%s, %s)" % (x, y)) + + def onKey(canvas, key, modifiers): +- canvas.setInfoLine("onKey(%s, %s)" % (`key`, modifiers)) ++ canvas.setInfoLine("onKey(%s, %s)" % (repr(key), modifiers)) + + + +--- src/piddle/piddlePDF.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddlePDF.py +@@ -16,7 +16,7 @@ self.pdf which offers numerous lower-level drawing rou + + #standard python library modules + import string +-import cStringIO ++import io + import pdfmetrics + import glob + import os +@@ -134,7 +134,7 @@ class PDFCanvas(Canvas): + + #if they specified a size smaller than page, + # be helpful and centre their diagram +- if self.pagesize <> self.drawingsize: ++ if self.pagesize != self.drawingsize: + dx = 0.5 * (self.pagesize[0] - self.drawingsize[0]) + dy = 0.5 * (self.pagesize[1] - self.drawingsize[1]) + self.pdf.translate(dx, dy) +@@ -183,7 +183,7 @@ class PDFCanvas(Canvas): + + if hasattr(file, 'write'): + self.pdf.save(fileobj=file) +- elif isinstance(file, types.StringType): ++ elif isinstance(file, str): + self.pdf.save(filename=file) + else: + self.pdf.save() +@@ -236,7 +236,7 @@ class PDFCanvas(Canvas): + face = 'serif' + else: + face = string.lower(font.face) +- while font_face_map.has_key(face): ++ while face in font_face_map: + face = font_face_map[face] + #step 2, - resolve bold/italic to get the right PS font name + psname = ps_font_map[(face, font.bold, font.italic)] +@@ -305,8 +305,8 @@ class PDFCanvas(Canvas): + else: + self.pdf.drawPath( + path, +- (edge <> transparent), #whether to stroke +- (fill <> transparent) #whether to fill ++ (edge != transparent), #whether to stroke ++ (fill != transparent) #whether to fill + ) + + #------------- drawing methods -------------- +@@ -372,7 +372,7 @@ class PDFCanvas(Canvas): + # 
inserting basic commands here to see if can get working + textobj = self.pdf.beginText() + +- if col <> self.defaultFillColor: ++ if col != self.defaultFillColor: + textobj.setFillColorRGB(col.red,col.green, col.blue) + + if angle != 0 : +--- src/piddle/piddlePIL.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddlePIL.py +@@ -42,10 +42,10 @@ else: + # load font metrics + try: + f = open(os.path.join(_fontprefix,'metrics.dat'), 'rb') +- import cPickle +- _widthmaps = cPickle.load(f) +- _ascents = cPickle.load(f) +- _descents = cPickle.load(f) ++ import pickle ++ _widthmaps = pickle.load(f) ++ _ascents = pickle.load(f) ++ _descents = pickle.load(f) + f.close() + except: + Log.write("Warning: unable to load font metrics!\n") +@@ -86,12 +86,12 @@ def _matchingFontPath(font): + if type(face) == StringType: + path = _pilFontPath(face,size,font.bold) + path = string.split(path,os.sep)[-1] +- if path in _widthmaps.keys(): return path ++ if path in list(_widthmaps.keys()): return path + else: + for item in font.face: + path = _pilFontPath(item,size,font.bold) + path = string.split(path,os.sep)[-1] +- if path in _widthmaps.keys(): return path ++ if path in list(_widthmaps.keys()): return path + # not found? Try it with courier, which should always be there + path = _pilFontPath('courier',size,font.bold) + return string.split(path,os.sep)[-1] +@@ -126,7 +126,7 @@ class PILCanvas( Canvas ): + self._pen = ImageDraw.ImageDraw(self._image) + self._pen.setink(0) + self._setFont( Font() ) +- self._pilversion = map(string.atoi, string.split(Image.VERSION, ".")) ++ self._pilversion = list(map(string.atoi, string.split(Image.VERSION, "."))) + Canvas.__init__(self, size, name) + + def __setattr__(self, attribute, value): +@@ -157,7 +157,7 @@ class PILCanvas( Canvas ): + # below here, file is guaranteed to be a string + if format == None: + if '.' 
not in file: +- raise TypeError, 'no file type given to save()' ++ raise TypeError('no file type given to save()') + filename = file + else: + filename = file + '.' + format +@@ -344,7 +344,8 @@ class PILCanvas( Canvas ): + temppen = ImageDraw.ImageDraw(tempimg) + temppen.setink( (255,255,255) ) + pilfont = _pilFont(font) +- if not pilfont: raise "bad font!", font ++ if not pilfont: ++ raise ValueError("bad font! %s" % font) + temppen.setfont( pilfont ) + pos = [4, int(tempsize/2 - self.fontAscent(font)) - self.fontDescent(font)] + temppen.text( pos, s ) +@@ -391,8 +392,8 @@ def test(): + canvas = PILCanvas() + + canvas.defaultLineColor = Color(0.7,0.7,1.0) # light blue +- canvas.drawLines( map(lambda i:(i*10,0,i*10,300), range(30)) ) +- canvas.drawLines( map(lambda i:(0,i*10,300,i*10), range(30)) ) ++ canvas.drawLines( [(i*10,0,i*10,300) for i in range(30)] ) ++ canvas.drawLines( [(0,i*10,300,i*10) for i in range(30)] ) + canvas.defaultLineColor = black + + canvas.drawLine(10,200, 20,190, color=red) +--- src/piddle/piddlePS.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddlePS.py +@@ -38,7 +38,7 @@ piddlePS - a PostScript backend for the PIDDLE drawing + + + from piddle import * +-import string, cStringIO ++import string, io + import piddlePSmetrics # for font info + import math + +@@ -309,7 +309,7 @@ translate + # once again, fall back to default, redundant, no? 
+ face = string.lower(PiddleLegalFonts["serif"]) + for reqFace in requested: +- if PiddleLegalFonts.has_key(string.lower(reqFace)): ++ if string.lower(reqFace) in PiddleLegalFonts: + face = string.lower(PiddleLegalFonts[string.lower(reqFace)]) + break + +@@ -591,7 +591,7 @@ translate + self.code.extend([ + 'gsave', + '%s %s neg translate' % (x,y), +- `angle`+' rotate']) ++ repr(angle)+' rotate']) + down = 0 + for line in lines : + self._drawStringOneLine(line, 0, 0+down, font, color, angle) +@@ -770,7 +770,7 @@ translate + figureCode.append("%s %s neg lineto" % tuple(args[:2])) + figureCode.append("%s %s neg %s %s neg %s %s neg curveto" % tuple(args[2:])) + else: +- raise TypeError, "unknown figure operator: "+op ++ raise TypeError("unknown figure operator: "+op) + + if closed: + figureCode.append("closepath") +@@ -798,10 +798,10 @@ translate + try: + import Image + except ImportError: +- print 'Python Imaging Library not available' ++ print('Python Imaging Library not available') + return + # For now let's start with 24 bit RGB images (following piddlePDF again) +- print "Trying to drawImage in piddlePS" ++ print("Trying to drawImage in piddlePS") + component_depth = 8 + myimage = image.convert('RGB') + imgwidth, imgheight = myimage.size +@@ -811,7 +811,7 @@ translate + y2 = y1 + imgheight + drawwidth = x2 - x1 + drawheight = y2 - y1 +- print 'Image size (%d, %d); Draw size (%d, %d)' % (imgwidth, imgheight, drawwidth, drawheight) ++ print('Image size (%d, %d); Draw size (%d, %d)' % (imgwidth, imgheight, drawwidth, drawheight)) + # now I need to tell postscript how big image is + + # "image operators assume that they receive sample data from +@@ -851,15 +851,15 @@ translate + # piddlePDF again + + rawimage = myimage.tostring() +- assert(len(rawimage) == imgwidth*imgheight, 'Wrong amount of data for image') ++ assert(len(rawimage) == imgwidth*imgheight, 'Wrong amount of data for image') + #compressed = zlib.compress(rawimage) # no zlib at moment + hex_encoded = 
self._AsciiHexEncode(rawimage) + + # write in blocks of 78 chars per line +- outstream = cStringIO.StringIO(hex_encoded) ++ outstream = io.StringIO(hex_encoded) + + dataline = outstream.read(78) +- while dataline <> "": ++ while dataline != "": + self.code.append(dataline) + dataline= outstream.read(78) + self.code.append('% end of image data') # for clarity +@@ -870,7 +870,7 @@ translate + + def _AsciiHexEncode(self, input): # also based on piddlePDF + "Helper function used by images" +- output = cStringIO.StringIO() ++ output = io.StringIO() + for char in input: + output.write('%02x' % ord(char)) + output.reset() +@@ -880,7 +880,7 @@ translate + try: + import Image + except ImportError: +- print 'Python Imaging Library not available' ++ print('Python Imaging Library not available') + return + # I don't have zlib -cwl + # try: +@@ -892,12 +892,12 @@ translate + + ### what sort of image are we to draw + if image.mode=='L' : +- print 'found image.mode= L' ++ print('found image.mode= L') + imBitsPerComponent = 8 + imNumComponents = 1 + myimage = image + elif image.mode == '1': +- print 'found image.mode= 1' ++ print('found image.mode= 1') + myimage = image.convert('L') + imNumComponents = 1 + myimage = image +@@ -923,7 +923,7 @@ translate + self.code.append('/DeviceRGB setcolorspace') + elif imNumComponents == 1 : + self.code.append('/DeviceGray setcolorspace') +- print 'setting colorspace gray' ++ print('setting colorspace gray') + # create the image dictionary + self.code.append(""" + << +@@ -942,15 +942,15 @@ translate + 'image']) + # after image operator just need to dump image dat to file as hexstring + rawimage = myimage.tostring() +- assert(len(rawimage) == imwidth*imheight, 'Wrong amount of data for image') ++ assert(len(rawimage) == imwidth*imheight, 'Wrong amount of data for image') + #compressed = zlib.compress(rawimage) # no zlib at moment + hex_encoded = self._AsciiHexEncode(rawimage) + + # write in blocks of 78 chars per line +- outstream = 
cStringIO.StringIO(hex_encoded) ++ outstream = io.StringIO(hex_encoded) + + dataline = outstream.read(78) +- while dataline <> "": ++ while dataline != "": + self.code.append(dataline) + dataline= outstream.read(78) + self.code.append('> % end of image data') # > is EOD for hex encoded filterfor clarity +--- src/piddle/piddlePSmetrics.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddlePSmetrics.py +@@ -125,7 +125,7 @@ class FontCache: + + def loadfont(self, fontname): + filename = AFMDIR + os.sep + fontname + '.afm' +- print 'cache loading',filename ++ print('cache loading',filename) + assert os.path.exists(filename) + widths = parseAFMfile(filename) + self.__widtharrays[fontname] = widths +@@ -139,7 +139,7 @@ class FontCache: + return self.__widtharrays[fontname] + except: + # font not found, use Courier +- print 'Font',fontname,'not found - using Courier for widths' ++ print('Font',fontname,'not found - using Courier for widths') + return self.getfont('courier') + + +@@ -152,7 +152,7 @@ class FontCache: + + def status(self): + #returns loaded fonts +- return self.__widtharrays.keys() ++ return list(self.__widtharrays.keys()) + + TheFontCache = FontCache() + +@@ -168,7 +168,7 @@ def psStringWidth(text, font, encoding): + try: + widths = _psWidths[encoding][string.lower(font) ] + except: +- raise KeyError, "Improper encoding %s or font name %s" % (encoding, font) ++ raise KeyError("Improper encoding %s or font name %s" % (encoding, font)) + w = 0 + for char in text: + w = w + widths[ord(char)] +--- src/piddle/piddleQD.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleQD.py +@@ -63,7 +63,7 @@ _curCanvas = None + + # global dictionary mapping font names to QD font IDs + _fontMap = {} +-for item in filter(lambda x:x[0]!='_',dir(Fonts)): ++for item in [x for x in dir(Fonts) if x[0]!='_']: + _fontMap[string.lower(item)] = Fonts.__dict__[item] + _fontMap['system'] = Fonts.kFontIDGeneva + _fontMap['monospaced'] = Fonts.kFontIDMonaco +@@ -248,7 +248,7 @@ class 
QDCanvas( Canvas ): + return 0 # font not found! + + # cache the fontID for quicker reference next time! +- font.__dict__['_QDfontID'] = fontID ++ font.__dict__['_QDfontID'] = fontID + # font._QDfontID = fontID + Qd.TextFont(fontID) + +@@ -569,19 +569,19 @@ def test(): + #import Image + #canvas.drawImage( Image.open(path), 0,0,300,300 ); + +- def myOnClick(canvas,x,y): print "clicked %s,%s" % (x,y) ++ def myOnClick(canvas,x,y): print("clicked %s,%s" % (x,y)) + canvas.onClick = myOnClick + + def myOnOver(canvas,x,y): canvas.setInfoLine( "mouse is over %s,%s" % (x,y) ) + + canvas.onOver = myOnOver + +- def myOnKey(canvas,key,mods): print "pressed %s with modifiers %s" % (key,mods) ++ def myOnKey(canvas,key,mods): print("pressed %s with modifiers %s" % (key,mods)) + canvas.onKey = myOnKey + + +- canvas.drawLines( map(lambda i:(i*10,0,i*10,300), range(30)) ) +- canvas.drawLines( map(lambda i:(0,i*10,300,i*10), range(30)) ) ++ canvas.drawLines( [(i*10,0,i*10,300) for i in range(30)] ) ++ canvas.drawLines( [(0,i*10,300,i*10) for i in range(30)] ) + canvas.defaultLineColor = black + + canvas.drawLine(10,200, 20,190, color=red) +--- src/piddle/piddleSVG/piddleSVG.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleSVG/piddleSVG.py +@@ -72,7 +72,7 @@ def _PointListToSVG(points,dupFirst=0): + + """ + outStr = '' +- for i in xrange(len(points)): ++ for i in range(len(points)): + outStr = outStr + '%.2f,%.2f '%(points[i][0],points[i][1]) + # add back on the first point. This is not required in the spec, + # but Adobe's beta-quality viewer seems to not like it being skipped +@@ -159,7 +159,7 @@ class SVGCanvas( Canvas ): + familyStr = '\'%s\''%(face) + else: + familyStr = face +- for i in xrange(1,len(font.face)): ++ for i in range(1,len(font.face)): + face = font.face[i] + if len(string.split(face)) > 1: + familyStr = ', \'%s\''%(face) +@@ -241,14 +241,14 @@ class SVGCanvas( Canvas ): + def save(self, type=''): + if type == '': + if '.' 
not in self.name: +- raise TypeError, 'no file type given to save()' ++ raise TypeError('no file type given to save()') + filename = self.name + else: + filename = self.name + '.' + type + outFile = open(filename,'w+') + outFile.write(self._txt+'') + outFile.close() +- print filename, "saved" ++ print(filename, "saved") + + + #------------- drawing methods -------------- +@@ -515,7 +515,7 @@ class SVGCanvas( Canvas ): + pathStr = pathStr + self._FormArcStr(x1,y1,x2,y2,theta1,extent) + + else: +- raise TypeError, "unknown figure operator: "+op ++ raise TypeError("unknown figure operator: "+op) + + if closed == 1: + pathStr = pathStr + 'Z' +@@ -568,8 +568,8 @@ def test(): + canvas = SVGCanvas(name="test") + + canvas.defaultLineColor = Color(0.7,0.7,1.0) # light blue +- canvas.drawLines( map(lambda i:(i*10,0,i*10,300), range(30)) ) +- canvas.drawLines( map(lambda i:(0,i*10,300,i*10), range(30)) ) ++ canvas.drawLines( [(i*10,0,i*10,300) for i in range(30)] ) ++ canvas.drawLines( [(0,i*10,300,i*10) for i in range(30)] ) + canvas.defaultLineColor = black + + canvas.drawLine(10,200, 20,190, color=red) +--- src/piddle/piddleTK2/piddleTK.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleTK2/piddleTK.py +@@ -15,7 +15,7 @@ You can find the latest version of this file: + via http://piddle.sourceforge.net + """ + +-import Tkinter, tkFont ++import tkinter, tkinter.font + tk = Tkinter + import piddle + import string +@@ -120,7 +120,7 @@ class FontManager: + # check if the user specified a generic face type + # like serif or monospaced. check is case-insenstive. + f = string.lower(font.face) +- if self.__alt_faces.has_key(f): ++ if f in self.__alt_faces: + family = self.__alt_faces[f] + else: + family = font.face +@@ -138,7 +138,7 @@ class FontManager: + key = (family,size,weight,slant,underline) + + # check if we've already seen this font. +- if self.font_cache.has_key(key): ++ if key in self.font_cache: + # yep, don't bother creating a new one. just fetch it. 
+ font = self.font_cache[key] + else: +@@ -146,7 +146,7 @@ class FontManager: + # this way we will return info about the actual font + # selected by Tk, which may be different than what we ask + # for if it's not availible. +- font = tkFont.Font(self.master, family=family, size=size, weight=weight, ++ font = tkinter.font.Font(self.master, family=family, size=size, weight=weight, + slant=slant,underline=underline) + self.font_cache[(family,size,weight,slant,underline)] = font + +@@ -204,7 +204,7 @@ class BaseTKCanvas(tk.Canvas, piddle.Canvas): + tk.Canvas.update(self) + + def clear(self): +- map(self.delete,self._item_ids) ++ list(map(self.delete,self._item_ids)) + self._item_ids = [] + + def _colorToTkColor(self, c): +@@ -259,7 +259,7 @@ class BaseTKCanvas(tk.Canvas, piddle.Canvas): + font = self._font_manager.getTkFontString(font or self.defaultFont) + new_item = self.create_text(x, y, text=s, + font=font, fill=color, +- anchor=Tkinter.W) ++ anchor=tkinter.W) + self._item_ids.append(new_item) + + def _drawRotatedString(self, s, x,y, font=None, color=None, angle=0): +@@ -292,7 +292,8 @@ class BaseTKCanvas(tk.Canvas, piddle.Canvas): + temppen.setink( (255,255,255) ) + pilfont = pp._pilFont(font) + +- if not pilfont: raise "bad font!", font ++ if not pilfont: ++ raise ValueError("bad font! %s" % font) + + temppen.setfont( pilfont ) + pos = [4, int(tempsize/2 - pilCan.fontAscent(font)) - pilCan.fontDescent(font)] +@@ -390,7 +391,7 @@ class BaseTKCanvas(tk.Canvas, piddle.Canvas): + if fillColor == self.__TRANSPARENT: + # draw open-ended set of lines + d = { 'fill':edgeColor, 'width': edgeWidth} +- new_item = apply(self.create_line, pointlist, d) ++ new_item = self.create_line(*pointlist, **d) + else: + # open filled shape. 
+ # draw it twice: +@@ -403,7 +404,7 @@ class BaseTKCanvas(tk.Canvas, piddle.Canvas): + self._item_ids.append(new_item) + + d = { 'fill':edgeColor, 'width': edgeWidth} +- new_item = apply(self.create_line, pointlist, d) ++ new_item = self.create_line(*pointlist, **d) + + self._item_ids.append(new_item) + +@@ -433,7 +434,7 @@ class BaseTKCanvas(tk.Canvas, piddle.Canvas): + # unless I keep a copy of this PhotoImage, it seems to be garbage collected + # and the image is removed from the display after this function. weird + itk = ImageTk.PhotoImage(myimage, master=self) +- new_item = self.create_image(x1, y1, image=itk, anchor=Tkinter.NW) ++ new_item = self.create_image(x1, y1, image=itk, anchor=tkinter.NW) + self._item_ids.append(new_item) + self._images.append(itk) + +--- src/piddle/piddleVCR.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleVCR.py +@@ -167,7 +167,7 @@ class VCRCanvas( Canvas ): + + def drawCurve(self, x1,y1, x2,y2, x3,y3, x4,y4, + edgeColor=None, edgeWidth=None, fillColor=None, closed=0): +- "Draw a Bézier curve with control points x1,y1 to x4,y4." ++ "Draw a Bezier curve with control points x1,y1 to x4,y4." + + self._recordfunc("drawCurve", x1,y1,x2,y2,x3,y3,x4,y4,edgeColor,edgeWidth,fillColor,closed) + +@@ -223,7 +223,7 @@ class VCRCanvas( Canvas ): + self._recordfunc("drawFigure", partList, edgeColor,edgeWidth,fillColor,closed) + + def drawImage(self, image, x1,y1, x2=None,y2=None): +- print "Warning!!! piddleVCR does not implent drawImage" ++ print("Warning!!! 
piddleVCR does not implent drawImage") + # These are thoughts on how to implement this using a shelf to store image + # it kept everyting contained in one file + # import shelve +--- src/piddle/piddleWX.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleWX.py +@@ -159,11 +159,11 @@ class _WXCanvasDefaultStatusBar(wxStatusBar): + self.redraw() + + def OnOver(self, x, y): +- self.text = `x` + "," + `y` ++ self.text = repr(x) + "," + repr(y) + self.redraw() + + def OnClick(self, x, y): +- self.text = `x` + "," + `y` ++ self.text = repr(x) + "," + repr(y) + self.click.SetValue(true) + self.redraw() + +--- src/piddle/piddleWxDc.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddleWxDc.py +@@ -56,7 +56,7 @@ class PiddleWxDc(piddle.Canvas): + if default_color is not None: + return self._getWXbrush(default_color) + else: +- raise "WXcanvas error: Cannot create brush." ++ raise("WXcanvas error: Cannot create brush.") + + return wxBrush(wxcolor) + +@@ -75,7 +75,7 @@ class PiddleWxDc(piddle.Canvas): + if default_color is not None: + return self._getWXpen(width, default_color) + else: +- raise "WXcanvas error: Cannot create pen." ++ raise("WXcanvas error: Cannot create pen.") + + return wxPen(wxcolor, width) + +@@ -236,7 +236,7 @@ class PiddleWxDc(piddle.Canvas): + # instead of just 2-tuples. Therefore, pointlist must be re-created as + # only 2-tuples + +- pointlist = map(lambda i: tuple(i), pointlist) ++ pointlist = [tuple(i) for i in pointlist] + if closed == 1: + pointlist.append(pointlist[0]) + +@@ -260,11 +260,11 @@ class PiddleWxDc(piddle.Canvas): + try: + from PIL import Image + except ImportError: +- print 'PIL not installed as package' ++ print('PIL not installed as package') + try: + import Image + except ImportError: +- raise "PIL not available!" 
++ raise("PIL not available!") + + if (x2 and y2 and x2>x1 and y2>y1): + imgPil = image.resize((x2-x1,y2-y1)) +--- src/piddle/piddletest.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/piddletest.py +@@ -40,8 +40,8 @@ def basics(canvasClass): + def drawBasics(canvas): + saver = StateSaver(canvas) # leave canvas state as you found it, restores state when leaves scope + canvas.defaultLineColor = Color(0.7,0.7,1.0) # light blue +- canvas.drawLines( map(lambda i:(i*10,0,i*10,300), range(30)) ) +- canvas.drawLines( map(lambda i:(0,i*10,300,i*10), range(30)) ) ++ canvas.drawLines( [(i*10,0,i*10,300) for i in range(30)] ) ++ canvas.drawLines( [(0,i*10,300,i*10) for i in range(30)] ) + canvas.defaultLineColor = black + + canvas.drawLine(10,200, 20,190, color=red) +@@ -222,8 +222,8 @@ def drawRotstring(canvas): + canvas.drawString(s2, 150, 150, angle=ang) + cnum = (cnum+1) % len(colors) + +- canvas.drawString( "This is a\nrotated\nmulti-line string!!!", 350, 100, angle= -90, font=Font(underline=1) ) +- #canvas.drawString( "This is a\nrotated\nmulti-line string!!!", 400, 175, angle= -45, font=Font(underline=1) ) ++ canvas.drawString( "This is a\nrotated\nmulti-line string!!!", 350, 100, angle= -90, font=Font(underline=1) ) ++ #canvas.drawString( "This is a\nrotated\nmulti-line string!!!", 400, 175, angle= -45, font=Font(underline=1) ) + return canvas + + #---------------------------------------------------------------------- +@@ -232,32 +232,32 @@ def tkTest(testfunc): + # piddleTK tests are called from here because need TK's event loop + try : + import piddleTK +- import Tkinter ++ import tkinter + except: +- print "A module needed for piddleTK is not available, select another backend" ++ print("A module needed for piddleTK is not available, select another backend") + return + +- root = Tkinter.Tk() +- frame = Tkinter.Frame(root) # label='piddletestTK' ++ root = tkinter.Tk() ++ frame = tkinter.Frame(root) # label='piddletestTK' + + #tkcanvas = 
piddleTK.TKCanvas(size=(400,400), name='piddletestTK', master = frame) +- # try new Tk canvas +- tkcanvas = piddleTK.TKCanvas(size=(400,400), name='piddletestTK', master = frame) +- bframe = Tkinter.Frame(root) ++ # try new Tk canvas ++ tkcanvas = piddleTK.TKCanvas(size=(400,400), name='piddletestTK', master = frame) ++ bframe = tkinter.Frame(root) + +- minimalB=Tkinter.Button(bframe, text='minimal test', +- command= lambda c=tkcanvas : (c.clear(),drawMinimal(c), c.flush())).pack(side=Tkinter.LEFT) +- basicB = Tkinter.Button(bframe, text='basic test', +- command= lambda c=tkcanvas: (c.clear(),drawBasics(c),c.flush()) ).pack(side=Tkinter.LEFT) +- spectB =Tkinter.Button(bframe, text='spectrum test', +- command= lambda c=tkcanvas: (c.clear(),drawSpectrum(c),c.flush()) ).pack(side=Tkinter.LEFT) +- stringsB = Tkinter.Button(bframe, text='strings test', +- command= lambda c=tkcanvas:(c.clear(),drawStrings(c),c.flush()) ).pack(side=Tkinter.LEFT) +- rotstrB = Tkinter.Button(bframe, text='rotated strings test', +- command= lambda c=tkcanvas:(c.clear(), drawRotstring(c),c.flush()) ).pack(side=Tkinter.LEFT) +- advancedB = Tkinter.Button(bframe, text='advanced test', +- command= lambda c=tkcanvas:(c.clear(), drawAdvanced(c),c.flush() ) ).pack(side=Tkinter.LEFT) +- bframe.pack(side=Tkinter.TOP) ++ minimalB=tkinter.Button(bframe, text='minimal test', ++ command= lambda c=tkcanvas : (c.clear(),drawMinimal(c), c.flush())).pack(side=tkinter.LEFT) ++ basicB = tkinter.Button(bframe, text='basic test', ++ command= lambda c=tkcanvas: (c.clear(),drawBasics(c),c.flush()) ).pack(side=tkinter.LEFT) ++ spectB =tkinter.Button(bframe, text='spectrum test', ++ command= lambda c=tkcanvas: (c.clear(),drawSpectrum(c),c.flush()) ).pack(side=tkinter.LEFT) ++ stringsB = tkinter.Button(bframe, text='strings test', ++ command= lambda c=tkcanvas:(c.clear(),drawStrings(c),c.flush()) ).pack(side=tkinter.LEFT) ++ rotstrB = tkinter.Button(bframe, text='rotated strings test', ++ command= lambda 
c=tkcanvas:(c.clear(), drawRotstring(c),c.flush()) ).pack(side=tkinter.LEFT) ++ advancedB = tkinter.Button(bframe, text='advanced test', ++ command= lambda c=tkcanvas:(c.clear(), drawAdvanced(c),c.flush() ) ).pack(side=tkinter.LEFT) ++ bframe.pack(side=tkinter.TOP) + frame.pack() + # try to draw before running mainloop + if testfunc== minimal: +@@ -272,11 +272,11 @@ def tkTest(testfunc): + drawStrings(tkcanvas) + elif testfunc == rotstring : + drawRotstring(tkcanvas) +- else : +- print "Illegal testfunc handed to tkTest" +- raise "Unsupported testfunc" ++ else: ++ print("Illegal testfunc handed to tkTest") ++ raise("Unsupported testfunc") + +- tkcanvas.flush() ++ tkcanvas.flush() + + root.mainloop() + root.destroy() +@@ -287,25 +287,25 @@ def wxTest(testfunc): + import piddleWX + from wxPython.wx import wxApp + except: +- print "A module needed for piddleWX is not available, select another backend" ++ print("A module needed for piddleWX is not available, select another backend") + return + +- global wx_app +- if not globals().has_key("wx_app"): +- class CanvasApp(wxApp): +- "The wxApp that runs canvas. Initializes windows, and handles redrawing" +- def OnInit(self): +- return 1 ++ global wx_app ++ if "wx_app" not in globals(): ++ class CanvasApp(wxApp): ++ "The wxApp that runs canvas. 
Initializes windows, and handles redrawing" ++ def OnInit(self): ++ return 1 + +- wx_app = CanvasApp(0) ++ wx_app = CanvasApp(0) + + # run the test, passing the canvas class and returning the canvas + canvas = testfunc(piddleWX.WXCanvas) +- ++ + canvas.flush() + +- # Run the main loop +- wx_app.MainLoop() ++ # Run the main loop ++ wx_app.MainLoop() + + + def runtest(backend, testfunc): +@@ -330,15 +330,15 @@ def runtest(backend, testfunc): + + # do post-test cleanup + canvas.flush() +- # handle save's here ++ # handle save's here + if backend == 'piddlePIL': + canvas.save(format='png') # save as a PNG file +- elif backend == 'piddleVCR': ++ elif backend == 'piddleVCR': + filename = canvas.name + ".vcr" + canvas.save(filename) +- print filename, "saved" +- else: # if backend == 'piddlePS' or backend== 'piddlePDF': +- canvas.save() # should be "pass'ed" by Canvas's that don't use save ++ print(filename, "saved") ++ else: # if backend == 'piddlePS' or backend== 'piddlePDF': ++ canvas.save() # should be "pass'ed" by Canvas's that don't use save + + + def mainLoop(): +@@ -358,34 +358,34 @@ def mainLoop(): + else: bflag = '' + if i == test: tflag = '==>' + else: tflag = '' +- print "%10s %-20s %10s %-20s" % (bflag, bstr, tflag, tstr) ++ print("%10s %-20s %10s %-20s" % (bflag, bstr, tflag, tstr)) + i = i+1 +- print ++ print() + +- inp = raw_input("Selection (0 to exit): ") +- print ++ inp = input("Selection (0 to exit): ") ++ print() + + if inp == '0': return + if inp: + testinp = '' + if inp[-1] in string.letters: testinp = inp[-1] + elif inp[0] in string.letters: testinp = inp[0] +- backinp = string.join(filter(lambda x:x in '0123456789',inp)) ++ backinp = string.join([x for x in inp if x in '0123456789']) + if backinp: + backend = int(backinp)-1 + if backend < len(backends): + docstr = __import__(backends[backend]).__doc__ +- if docstr: print docstr +- else: print "" ++ if docstr: print(docstr) ++ else: print("") + else: backend = None + if testinp: + test = 
ord(string.upper(testinp[0])) - ord('A') + if test >= 0 and test < len(tests): + docstr = tests[test].__doc__ + if docstr: +- print docstr ++ print(docstr) + else: test = None +- print ++ print() + + # now, if we have a valid backend and test, run it + if backend != None and test != None: +--- src/piddle/polyfunc.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/polyfunc.py +@@ -168,7 +168,7 @@ class AffineMatrix: + + # would like to reshape the sequence, do w/ a loop for now + res = [] +- for ii in xrange(0,N, 2): ++ for ii in range(0,N, 2): + pt = self.transformPt( (seq[ii], seq[ii+1]) ) + res.extend(pt) + +@@ -214,7 +214,7 @@ class CoordFrame: + def drawCircleAt(canvas, x,y,r, **kw): + # useful for marking a particular location w/ a glyph + df = canvas.__class__ +- apply(df.drawEllipse, (canvas, x-r, y-r, x+r,y+r), kw) ++ df.drawEllipse(*(canvas, x-r, y-r, x+r,y+r), **kw) + + + def drawCubicPolynomial(canvas, frame, xinterval, A=0.0, B=0.0, C=0.0, D=0.0, +@@ -288,7 +288,7 @@ def runtest(): + frame.drawFrameBox(canvas) + # draw a family of quadratics w/in the box w/ x-intercept x=0 + NA = 10.0 +- for AA in xrange(1,NA,2): ++ for AA in range(1,NA,2): + drawQuad(canvas, frame, xinterval=(-10,10), A= AA/NA, B=0.0) + + # now some other assorted quadratics +@@ -316,7 +316,7 @@ def runtest(): + + + if __name__== '__main__': +- print 'Running test drawing assorted quadratics to qtest.ps' ++ print('Running test drawing assorted quadratics to qtest.ps') + import piddle + runtest() + +--- src/piddle/stringformat.py.orig 2002-06-03 13:46:30 UTC ++++ src/piddle/stringformat.py +@@ -139,14 +139,14 @@ class StringSegment: + return y + + def dump(self): +- print "StringSegment: ]%s[" % self.s +- print "\tsuper = ", self.super +- print "\tsub = ", self.sub +- print "\tbold = ", self.bold +- print "\titalic = ",self.italic +- print "\tunderline = ", self.underline +- print "\twidth = ", self.width +- print "\tgreek = ", self.greek ++ print("StringSegment: ]%s[" % self.s) ++ 
print("\tsuper = ", self.super) ++ print("\tsub = ", self.sub) ++ print("\tbold = ", self.bold) ++ print("\titalic = ",self.italic) ++ print("\tunderline = ", self.underline) ++ print("\twidth = ", self.width) ++ print("\tgreek = ", self.greek) + + #------------------------------------------------------------------ + # The StringFormatter will be able to format the following xml +@@ -245,7 +245,7 @@ class StringFormatter(xmllib.XMLParser): + } + + # automatically add handlers for all of the greek characters +- for item in greekchars.keys(): ++ for item in list(greekchars.keys()): + self.elements[item] = (lambda attr,self=self,letter=greekchars[item]: \ + self.start_greek(attr,letter), self.end_greek) + +@@ -253,7 +253,7 @@ class StringFormatter(xmllib.XMLParser): + self.greek = 0 + # set up dictionary for greek characters, this is a class variable + # should I copy it and then update it? +- for item in greekchars.keys(): ++ for item in list(greekchars.keys()): + self.entitydefs[item] = '<%s/>' % item + + #---------------------------------------------------------------- +@@ -386,15 +386,15 @@ def test1(): + drawString(canvas,"hello therehi",10,20) + drawString(canvas,"hello!",10,40) + +- print "'hello!' width = ", stringWidth(canvas,"hello!") +- print "'hello!' PIDDLE width = ", canvas.stringWidth("hello!") ++ print("'hello!' width = ", stringWidth(canvas,"hello!")) ++ print("'hello!' PIDDLE width = ", canvas.stringWidth("hello!")) + + drawString(canvas, "hello! goodbye", 10,60) +- print "'hello! goodbye' width = ", stringWidth(canvas,"hello! goodbye") ++ print("'hello! goodbye' width = ", stringWidth(canvas,"hello! goodbye")) + drawString(canvas, "hello!", 10,80, Font(bold=1)) +- print "'hello!' Font(bold=1) PIDDLE width = ", canvas.stringWidth("hello!",Font(bold=1)) ++ print("'hello!' 
Font(bold=1) PIDDLE width = ", canvas.stringWidth("hello!",Font(bold=1))) + drawString(canvas, " goodbye", 10,100) +- print "' goodbye' PIDDLE width = ", canvas.stringWidth(" goodbye") ++ print("' goodbye' PIDDLE width = ", canvas.stringWidth(" goodbye")) + canvas.flush() + + def test2(): +@@ -493,21 +493,21 @@ def stringformatTest(): + "bold+sub hello underline+super") + + # break down the various string widths +- print 'sw("bold+sub") = ', stringWidth(canvas,"bold+sub") +- print 'sw(" hello ") = ', stringWidth(canvas," hello ") +- print 'sw("underline+super") = ', \ +- stringWidth(canvas,"underline+super") ++ print('sw("bold+sub") = ', stringWidth(canvas,"bold+sub")) ++ print('sw(" hello ") = ', stringWidth(canvas," hello ")) ++ print('sw("underline+super") = ', \ ++ stringWidth(canvas,"underline+super")) + + pwidth1 = canvas.stringWidth("bold+sub",Font(size=canvas.defaultFont.size-sizedelta, bold=1)) +- print "pwidth1 = ", pwidth1 ++ print("pwidth1 = ", pwidth1) + pwidth2 = canvas.stringWidth(" hello ") +- print "pwidth2 = ", pwidth2 ++ print("pwidth2 = ", pwidth2) + pwidth3 = canvas.stringWidth("underline+super", + Font(size=canvas.defaultFont.size-sizedelta,underline=1)) +- print "pwidth3 = ", pwidth3 ++ print("pwidth3 = ", pwidth3) + + # these should be the same +- print "sfwidth = ", sfwidth, " pwidth = ", pwidth1+pwidth2+pwidth3 ++ print("sfwidth = ", sfwidth, " pwidth = ", pwidth1+pwidth2+pwidth3) + + ################################################### testing greek characters + # looks better in a larger font +@@ -515,21 +515,21 @@ def stringformatTest(): + x = 10 + y = canvas.defaultFont.size*1.5 + drawString(canvas,"α β Δ ",x,y, Font(size=16), color = blue) +- print "line starting with alpha should be font size 16" ++ print("line starting with alpha should be font size 16") + y = y+30 + drawString(canvas,"ϵ η Γ ",x,y, color = green) + y = y+30 + drawString(canvas,"ι κ Λ ",x,y, color = blue) + y = y+30 + drawString(canvas,"μ ν Ω ",x,y, color = green) +- 
print "mu should be underlined, Omega should be big and bold" ++ print("mu should be underlined, Omega should be big and bold") + y = y+30 + drawString(canvas,"ο Φ φ ",x,y, color = blue) + y = y+30 + drawString(canvas,"Π π ϖ ψ ρ",x,y, color = green) + y = y+30 + drawString(canvas,"Σ σ ς ",x,y, color = blue) +- print "line starting with sigma should be completely underlined" ++ print("line starting with sigma should be completely underlined") + y = y+30 + drawString(canvas,"Θ θ ϑ ξ ζ",x,y, color = green) + y= y+30 diff --git a/graphics/poppler-glib/Makefile b/graphics/poppler-glib/Makefile index 639d50d648e..3a0237c47bf 100644 --- a/graphics/poppler-glib/Makefile +++ b/graphics/poppler-glib/Makefile @@ -1,6 +1,6 @@ # Created by: Michael Johnson -PORTREVISION= 0 +PORTREVISION= 1 COMMENT= GLib bindings to poppler diff --git a/graphics/poppler-qt5/Makefile b/graphics/poppler-qt5/Makefile index 7c907c3f7ed..326d6e35fff 100644 --- a/graphics/poppler-qt5/Makefile +++ b/graphics/poppler-qt5/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 COMMENT= Qt 5 bindings to poppler diff --git a/graphics/poppler-utils/Makefile b/graphics/poppler-utils/Makefile index e762bf7224b..e9579593b9b 100644 --- a/graphics/poppler-utils/Makefile +++ b/graphics/poppler-utils/Makefile @@ -1,6 +1,6 @@ # Created by: Michael Nottebrock -PORTREVISION= 0 +PORTREVISION= 1 COMMENT= Poppler's xpdf-workalike command line utilities # ' diff --git a/graphics/poppler/Makefile b/graphics/poppler/Makefile index 1df74a14dd0..d17e2b04214 100644 --- a/graphics/poppler/Makefile +++ b/graphics/poppler/Makefile @@ -2,6 +2,7 @@ PORTNAME= poppler DISTVERSION= 21.12.0 +PORTREVISION= 1 CATEGORIES= graphics print MASTER_SITES= https://poppler.freedesktop.org/ diff --git a/graphics/proj-data/Makefile b/graphics/proj-data/Makefile index 36917c43e22..025117e17b4 100644 --- a/graphics/proj-data/Makefile +++ b/graphics/proj-data/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= proj-data -PORTVERSION= 
1.8 +PORTVERSION= 1.9 CATEGORIES= graphics geography MASTER_SITES= https://download.osgeo.org/proj/ diff --git a/graphics/proj-data/distinfo b/graphics/proj-data/distinfo index dd97b5371d5..524250a112b 100644 --- a/graphics/proj-data/distinfo +++ b/graphics/proj-data/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641661762 -SHA256 (proj-data-1.8.tar.gz) = 1e732dcee36d0829d87cecd70993e48b23305a3a54ca6346e5f094dcdc9a8e28 -SIZE (proj-data-1.8.tar.gz) = 586765549 +TIMESTAMP = 1647264434 +SHA256 (proj-data-1.9.tar.gz) = 8cf8507555e99cea1eeeeb23595211ffe7ae016254ba674477533e167ee7bcb2 +SIZE (proj-data-1.9.tar.gz) = 589748963 diff --git a/graphics/proj-data/pkg-plist b/graphics/proj-data/pkg-plist index 8533db0990f..8bcca65e3e9 100644 --- a/graphics/proj-data/pkg-plist +++ b/graphics/proj-data/pkg-plist @@ -23,6 +23,7 @@ %%DATADIR%%/au_icsm_README.txt %%DATADIR%%/be_ign_README.txt %%DATADIR%%/be_ign_bd72lb72_etrs89lb08.tif +%%DATADIR%%/be_ign_hBG18.tif %%DATADIR%%/br_ibge_CA61_003.tif %%DATADIR%%/br_ibge_CA7072_003.tif %%DATADIR%%/br_ibge_README.txt @@ -170,6 +171,7 @@ %%DATADIR%%/nl_nsgi_nlgeo2018.tif %%DATADIR%%/nl_nsgi_rdcorr2018.tif %%DATADIR%%/nl_nsgi_rdtrans2018.tif +%%DATADIR%%/no_kv_CD_above_Ell_ETRS89_v2021a.tif %%DATADIR%%/no_kv_ETRS89NO_NGO48_TIN.json %%DATADIR%%/no_kv_HREF2018B_NN2000_EUREF89.tif %%DATADIR%%/no_kv_NKGETRF14_EPSG7922_2000.tif @@ -234,6 +236,9 @@ %%DATADIR%%/nz_linz_stisht1977-nzvd2016.tif %%DATADIR%%/nz_linz_taraht1970-nzvd2016.tif %%DATADIR%%/nz_linz_wellht1953-nzvd2016.tif +%%DATADIR%%/pl_gugik_README.txt +%%DATADIR%%/pl_gugik_geoid2011-PL-EVRF2007-NH.tif +%%DATADIR%%/pl_gugik_geoid2011-PL-KRON86-NH.tif %%DATADIR%%/pt_dgt_D73_ETRS89_geo.tif %%DATADIR%%/pt_dgt_DLx_ETRS89_geo.tif %%DATADIR%%/pt_dgt_README.txt diff --git a/graphics/py-beziers/Makefile b/graphics/py-beziers/Makefile index 91ee1040473..8837d4e60fa 100644 --- a/graphics/py-beziers/Makefile +++ b/graphics/py-beziers/Makefile @@ -2,6 +2,7 @@ PORTNAME= beziers PORTVERSION= 0.4.0 
+PORTREVISION= 1 CATEGORIES= graphics python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/graphics/py-beziers/files/patch-setup.py b/graphics/py-beziers/files/patch-setup.py new file mode 100644 index 00000000000..b71801d2992 --- /dev/null +++ b/graphics/py-beziers/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2021-10-14 13:10:52 UTC ++++ setup.py +@@ -22,7 +22,7 @@ config = { + "Development Status :: 4 - Beta" + + ], +- 'packages': find_packages(), ++ 'packages': find_packages(exclude=['test*']), + } + + if __name__ == '__main__': diff --git a/graphics/py-h3/Makefile b/graphics/py-h3/Makefile index 43f53e28d3d..19ada687f88 100644 --- a/graphics/py-h3/Makefile +++ b/graphics/py-h3/Makefile @@ -30,6 +30,11 @@ NUMPY_DESC= NumPy support NUMPY_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}numpy>=0,1:math/py-numpy@${PY_FLAVOR} +post-install: + ${FIND} ${STAGEDIR}${PYTHON_SITELIBDIR} -name '*.so' -exec ${STRIP_CMD} {} + + ${PYTHON_CMD} -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} + ${PYTHON_CMD} -O -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} + do-test: cd ${WRKSRC} && ${SETENV} PYTHONPATH=${STAGEDIR}${PYTHON_SITELIBDIR} ${PYTHON_CMD} -m pytest -rs -v diff --git a/graphics/py-imageio/Makefile b/graphics/py-imageio/Makefile index 3c862159d68..775edb5c80c 100644 --- a/graphics/py-imageio/Makefile +++ b/graphics/py-imageio/Makefile @@ -1,5 +1,5 @@ PORTNAME= imageio -PORTVERSION= 2.14.1 +PORTVERSION= 2.16.1 CATEGORIES= graphics python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,8 +13,8 @@ LICENSE_FILE= ${WRKSRC}/LICENSE LIB_DEPENDS= libfreeimage.so:graphics/freeimage RUN_DEPENDS= ffmpeg:multimedia/ffmpeg \ ${PYTHON_PKGNAMEPREFIX}imageio-ffmpeg>=0:graphics/py-imageio-ffmpeg@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}numpy>=0,1:math/py-numpy@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pillow>=0:graphics/py-pillow@${PY_FLAVOR} + 
${PYTHON_PKGNAMEPREFIX}numpy>=1.20.0,1:math/py-numpy@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pillow>=8.3.2:graphics/py-pillow@${PY_FLAVOR} USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils diff --git a/graphics/py-imageio/distinfo b/graphics/py-imageio/distinfo index 832fcb63cc7..4a3633b85aa 100644 --- a/graphics/py-imageio/distinfo +++ b/graphics/py-imageio/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971108 -SHA256 (imageio-2.14.1.tar.gz) = 709c18f800981e4286abe4bd86b6c9b5bb6e285b6b933b5ba0962ef8e7994058 -SIZE (imageio-2.14.1.tar.gz) = 3386657 +TIMESTAMP = 1647264594 +SHA256 (imageio-2.16.1.tar.gz) = 7f123cb23a77ac5abe8ed4e7ad6a60831a82de2c5d123463dcf1d4278c4779d2 +SIZE (imageio-2.16.1.tar.gz) = 3401236 diff --git a/graphics/py-pillow/Makefile b/graphics/py-pillow/Makefile index 9dfdddbedc6..bd76f3d097a 100644 --- a/graphics/py-pillow/Makefile +++ b/graphics/py-pillow/Makefile @@ -1,10 +1,11 @@ # Created by: Kubilay Kocak PORTNAME= pillow -PORTVERSION= 8.2.0 -PORTREVISION= 1 +PORTVERSION= 8.4.0 CATEGORIES= graphics python +MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +DISTNAME= Pillow-${PORTVERSION} MAINTAINER= sunpoet@FreeBSD.org COMMENT= Fork of the Python Imaging Library (PIL) @@ -16,14 +17,9 @@ LICENSE_PERMS= dist-mirror dist-sell pkg-mirror pkg-sell auto-accept TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>0:devel/py-pytest@${PY_FLAVOR} -# Python 3.5+ -USES= cpe python:3.5+ -USE_GITHUB= yes +USES= cpe python:3.6+ USE_PYTHON= autoplist concurrent distutils -GH_ACCOUNT= python-${PORTNAME} -GH_PROJECT= ${PORTNAME:C/p/P/} - CPE_VENDOR= python OPTIONS_DEFINE= FREETYPE JPEG JPEG2000 LCMS OLE PNG RAQM TIFF TKINTER WEBP XCB diff --git a/graphics/py-pillow/distinfo b/graphics/py-pillow/distinfo index a1a68742bb8..f45d2448bfe 100644 --- a/graphics/py-pillow/distinfo +++ b/graphics/py-pillow/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1619193474 -SHA256 (python-pillow-Pillow-8.2.0_GH0.tar.gz) = 
da86280bbfcd2b7fd87d33ce6f28688bf600600dfdbe85bfc16c562bbd26ca97 -SIZE (python-pillow-Pillow-8.2.0_GH0.tar.gz) = 47898485 +TIMESTAMP = 1647264596 +SHA256 (Pillow-8.4.0.tar.gz) = b8e2f83c56e141920c39464b852de3719dfbfb6e3c99a2d8da0edf4fb33176ed +SIZE (Pillow-8.4.0.tar.gz) = 49368411 diff --git a/graphics/py-pycha/files/patch-2to3 b/graphics/py-pycha/files/patch-2to3 new file mode 100644 index 00000000000..be83c43ce2a --- /dev/null +++ b/graphics/py-pycha/files/patch-2to3 @@ -0,0 +1,104 @@ +--- chavier/gui.py.orig 2011-08-08 19:23:18 UTC ++++ chavier/gui.py +@@ -37,7 +37,7 @@ class GUI(object): + self.main_window.connect('delete_event', self.delete_event) + self.main_window.connect('destroy', self.destroy) + self.main_window.set_default_size(640, 480) +- self.main_window.set_title(u'Chavier') ++ self.main_window.set_title('Chavier') + + vbox = gtk.VBox() + self.main_window.add(vbox) +@@ -59,7 +59,7 @@ class GUI(object): + hpaned.add1(vpaned) + vpaned.show() + +- block1 = self._create_sidebar_block(u'Data sets', ++ block1 = self._create_sidebar_block('Data sets', + self._datasets_notebook_creator) + self._create_dataset("Dataset 1") + block1.set_size_request(-1, 200) +@@ -67,7 +67,7 @@ class GUI(object): + vpaned.add1(block1) + block1.show() + +- block2 = self._create_sidebar_block(u'Options', ++ block2 = self._create_sidebar_block('Options', + self._options_treeview_creator) + vpaned.add2(block2) + block2.show() +@@ -215,7 +215,7 @@ class GUI(object): + box = gtk.VBox(spacing=6) + box.set_border_width(6) + label = gtk.Label() +- label.set_markup(u'%s' % title) ++ label.set_markup('%s' % title) + label.set_alignment(0.0, 0.5) + box.pack_start(label, False, False) + label.show() +@@ -276,7 +276,7 @@ class GUI(object): + return scrolled_window + + def _fill_options_store(self, options, parent_node, types): +- for name, value in options.items(): ++ for name, value in list(options.items()): + value_type = types[name] + if isinstance(value, dict): + current_parent = 
self.options_store.append(parent_node, +@@ -356,7 +356,7 @@ class GUI(object): + parent = model.iter_parent(parent) + parents.reverse() + parents.append(name) +- label = u'.'.join(parents) ++ label = '.'.join(parents) + + dialog = OptionDialog(self.main_window, label, value, value_type) + response = dialog.run() +@@ -408,7 +408,7 @@ class GUI(object): + + def add_dataset(self, action): + n_pages = self.datasets_notebook.get_n_pages() +- suggested_name = u'Dataset %d' % (n_pages + 1) ++ suggested_name = 'Dataset %d' % (n_pages + 1) + dialog = TextInputDialog(self.main_window, suggested_name) + response = dialog.run() + if response == gtk.RESPONSE_ACCEPT: +@@ -523,18 +523,18 @@ class GUI(object): + + alloc = self.drawing_area.get_allocation() + +- print 'CHART STATE' +- print '-' * 70 +- print 'surface: %d x %d' % (alloc.width, alloc.height) +- print 'area :', self.chart.area +- print +- print 'minxval:', self.chart.minxval +- print 'maxxval:', self.chart.maxxval +- print 'xrange :', self.chart.xrange +- print +- print 'minyval:', self.chart.minyval +- print 'maxyval:', self.chart.maxyval +- print 'yrange :', self.chart.yrange ++ print('CHART STATE') ++ print('-' * 70) ++ print('surface: %d x %d' % (alloc.width, alloc.height)) ++ print('area :', self.chart.area) ++ print() ++ print('minxval:', self.chart.minxval) ++ print('maxxval:', self.chart.maxxval) ++ print('xrange :', self.chart.xrange) ++ print() ++ print('minyval:', self.chart.minyval) ++ print('maxyval:', self.chart.maxyval) ++ print('yrange :', self.chart.yrange) + + def about(self, action=None): + dialog = AboutDialog(self.main_window) +@@ -556,6 +556,6 @@ str_converters = { + str: str, + int: int, + float: float, +- unicode: unicode, ++ str: str, + bool: str2bool, + } diff --git a/graphics/py-pyproj/Makefile b/graphics/py-pyproj/Makefile index c325dd62f37..e05a75e601e 100644 --- a/graphics/py-pyproj/Makefile +++ b/graphics/py-pyproj/Makefile @@ -1,7 +1,7 @@ # Created by: TAOKA Fumiyoshi PORTNAME= pyproj 
-PORTVERSION= 3.2.1 +PORTVERSION= 3.3.0 CATEGORIES= graphics geography python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,7 +12,7 @@ COMMENT= Cython wrapper to provide python interfaces to PROJ.4 functions LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE -BUILD_DEPENDS= proj>=7.2.0,1:graphics/proj +BUILD_DEPENDS= proj>=8.0.0,1:graphics/proj LIB_DEPENDS= libproj.so:graphics/proj USES= python:3.7+ diff --git a/graphics/py-pyproj/distinfo b/graphics/py-pyproj/distinfo index 6c46d39be07..78a1966461c 100644 --- a/graphics/py-pyproj/distinfo +++ b/graphics/py-pyproj/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632227548 -SHA256 (pyproj-3.2.1.tar.gz) = 4a936093825ff55b24c1fc6cc093541fcf6d0f6d406589ed699e62048ebf3877 -SIZE (pyproj-3.2.1.tar.gz) = 213342 +TIMESTAMP = 1647264598 +SHA256 (pyproj-3.3.0.tar.gz) = ce8bfbc212729e9a643f5f5d77f7a93394e032eda1e2d8799ae902d08add747e +SIZE (pyproj-3.3.0.tar.gz) = 217539 diff --git a/graphics/qgis-ltr/Makefile b/graphics/qgis-ltr/Makefile index 1b5e5861782..d9c5013df84 100644 --- a/graphics/qgis-ltr/Makefile +++ b/graphics/qgis-ltr/Makefile @@ -2,6 +2,7 @@ PORTNAME= qgis DISTVERSION= 3.22.5 +PORTREVISION= 2 CATEGORIES= graphics geography MASTER_SITES= https://qgis.org/downloads/ PKGNAMESUFFIX= -ltr diff --git a/graphics/qgis/Makefile b/graphics/qgis/Makefile index 6fa3b174fce..46417c6ce77 100644 --- a/graphics/qgis/Makefile +++ b/graphics/qgis/Makefile @@ -2,6 +2,7 @@ PORTNAME= qgis DISTVERSION= 3.24.1 +PORTREVISION= 2 CATEGORIES= graphics geography MASTER_SITES= https://qgis.org/downloads/ diff --git a/graphics/rawstudio/Makefile b/graphics/rawstudio/Makefile index 7e51a316445..5da0e498447 100644 --- a/graphics/rawstudio/Makefile +++ b/graphics/rawstudio/Makefile @@ -2,7 +2,7 @@ PORTNAME= rawstudio PORTVERSION= 2.0 -PORTREVISION= 20 +PORTREVISION= 21 CATEGORIES= graphics MASTER_SITES= http://rawstudio.org/files/release/ diff --git a/graphics/rawtherapee/Makefile b/graphics/rawtherapee/Makefile index 
b8fc2bee038..3ab929d992c 100644 --- a/graphics/rawtherapee/Makefile +++ b/graphics/rawtherapee/Makefile @@ -2,7 +2,7 @@ PORTNAME= rawtherapee PORTVERSION= 5.8 -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= graphics MASTER_SITES= https://rawtherapee.com/shared/source/ \ LOCAL/mandree/ diff --git a/graphics/rubygem-image_processing/Makefile b/graphics/rubygem-image_processing/Makefile index 8880098e89e..19f4f01a1c2 100644 --- a/graphics/rubygem-image_processing/Makefile +++ b/graphics/rubygem-image_processing/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= image_processing -PORTVERSION= 1.12.1 +PORTVERSION= 1.12.2 CATEGORIES= graphics rubygems MASTER_SITES= RG diff --git a/graphics/rubygem-image_processing/distinfo b/graphics/rubygem-image_processing/distinfo index 213864022b2..b13b044383a 100644 --- a/graphics/rubygem-image_processing/distinfo +++ b/graphics/rubygem-image_processing/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1604932781 -SHA256 (rubygem/image_processing-1.12.1.gem) = 21c084fa4ef4286c66cbee1525844b0143f65f2afcf80faa7369f24bc9a6e89c -SIZE (rubygem/image_processing-1.12.1.gem) = 17408 +TIMESTAMP = 1647264846 +SHA256 (rubygem/image_processing-1.12.2.gem) = d3b9e9c5a1cc2607a5214cc28b90d317a03bdd06239584c97535dd73e46f62b8 +SIZE (rubygem/image_processing-1.12.2.gem) = 17408 diff --git a/graphics/rubygem-tanuki_emoji/Makefile b/graphics/rubygem-tanuki_emoji/Makefile index ea007fd5d91..626bcf93380 100644 --- a/graphics/rubygem-tanuki_emoji/Makefile +++ b/graphics/rubygem-tanuki_emoji/Makefile @@ -1,7 +1,7 @@ # Created by: Matthias Fechner PORTNAME= tanuki_emoji -PORTVERSION= 0.5.0 +PORTVERSION= 0.6.0 CATEGORIES= graphics rubygems MASTER_SITES= RG diff --git a/graphics/rubygem-tanuki_emoji/distinfo b/graphics/rubygem-tanuki_emoji/distinfo index e10231f34e5..f4f372bd5fc 100644 --- a/graphics/rubygem-tanuki_emoji/distinfo +++ b/graphics/rubygem-tanuki_emoji/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637598925 -SHA256 (rubygem/tanuki_emoji-0.5.0.gem) = 
9abf23cf7d38f0b00357bd55b22b944ebbbdcbab5be342f98f0392df87c280e3 -SIZE (rubygem/tanuki_emoji-0.5.0.gem) = 6562816 +TIMESTAMP = 1647940310 +SHA256 (rubygem/tanuki_emoji-0.6.0.gem) = 4ce91aefed2d076b73fba3eff50e89660c3d25691787a9fe4c0dfabb4218c12a +SIZE (rubygem/tanuki_emoji-0.6.0.gem) = 6564864 diff --git a/graphics/sane-airscan/Makefile b/graphics/sane-airscan/Makefile index 1ac909a715e..5f16ce9b8d0 100644 --- a/graphics/sane-airscan/Makefile +++ b/graphics/sane-airscan/Makefile @@ -1,5 +1,6 @@ PORTNAME= sane-airscan DISTVERSION= 0.99.26 +PORTREVISION= 1 CATEGORIES= graphics MAINTAINER= henry.hu.sh@gmail.com diff --git a/graphics/sane-backends/Makefile b/graphics/sane-backends/Makefile index d71298bf34a..16b330a6d77 100644 --- a/graphics/sane-backends/Makefile +++ b/graphics/sane-backends/Makefile @@ -2,6 +2,7 @@ PORTNAME= sane-backends DISTVERSION= 1.1.1 +PORTREVISION= 1 CATEGORIES= graphics MASTER_SITES= https://gitlab.com/sane-project/backends/uploads/7d30fab4e115029d91027b6a58d64b43/ diff --git a/graphics/shotwell/Makefile b/graphics/shotwell/Makefile index ada5447d742..ac1b05f2244 100644 --- a/graphics/shotwell/Makefile +++ b/graphics/shotwell/Makefile @@ -2,6 +2,7 @@ PORTNAME= shotwell PORTVERSION= 0.30.14 +PORTREVISION= 1 CATEGORIES= graphics gnome MASTER_SITES= GNOME diff --git a/graphics/simple-scan/Makefile b/graphics/simple-scan/Makefile index 9cdbe407f0b..cd5ecb3bd67 100644 --- a/graphics/simple-scan/Makefile +++ b/graphics/simple-scan/Makefile @@ -1,12 +1,9 @@ PORTNAME= simple-scan -PORTVERSION= 40.7 +PORTVERSION= 42.0 CATEGORIES= graphics gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome -PATCH_SITES= https://gitlab.gnome.org/GNOME/${PORTNAME}/-/commit/ -PATCHFILES+= da6626debe00.patch:-p1 # https://gitlab.gnome.org/GNOME/simple-scan/-/merge_requests/202 - MAINTAINER= gnome@FreeBSD.org COMMENT= Simple scanning utility diff --git a/graphics/simple-scan/distinfo b/graphics/simple-scan/distinfo index 
fa83cd3bfcf..06f17a6ab6b 100644 --- a/graphics/simple-scan/distinfo +++ b/graphics/simple-scan/distinfo @@ -1,5 +1,3 @@ -TIMESTAMP = 1640257856 -SHA256 (gnome/simple-scan-40.7.tar.xz) = 7c551852cb5af7d34aa989f8ad5ede3cbe31828cf8dd5aec2b2b6fdcd1ac3d53 -SIZE (gnome/simple-scan-40.7.tar.xz) = 1480288 -SHA256 (gnome/da6626debe00.patch) = 8dbba91603ce89396b5173604d6bf9d4c3d05b2a7f3861f03ed7ec8d39413d42 -SIZE (gnome/da6626debe00.patch) = 1416 +TIMESTAMP = 1647628515 +SHA256 (gnome/simple-scan-42.0.tar.xz) = ac1f857afd0bc8897dd2045023ad7c5713e5ceefca56b0b3cc5e9a4795329586 +SIZE (gnome/simple-scan-42.0.tar.xz) = 1097464 diff --git a/graphics/swfmill/Makefile b/graphics/swfmill/Makefile index 7f5592563c0..73d75d3b314 100644 --- a/graphics/swfmill/Makefile +++ b/graphics/swfmill/Makefile @@ -2,7 +2,7 @@ PORTNAME= swfmill PORTVERSION= 0.3.6 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= graphics MASTER_SITES= http://swfmill.org/releases/ diff --git a/graphics/synfig/Makefile b/graphics/synfig/Makefile index 98938a69cbc..968ea683214 100644 --- a/graphics/synfig/Makefile +++ b/graphics/synfig/Makefile @@ -3,7 +3,7 @@ PORTNAME= synfig PORTVERSION= 1.2.2 DISTVERSIONPREFIX=v -PORTREVISION= 17 +PORTREVISION= 18 CATEGORIES= graphics devel multimedia MAINTAINER= portmaster@BSDforge.com diff --git a/graphics/synfigstudio/Makefile b/graphics/synfigstudio/Makefile index 6164356c54c..2c313ff9151 100644 --- a/graphics/synfigstudio/Makefile +++ b/graphics/synfigstudio/Makefile @@ -3,7 +3,7 @@ PORTNAME= synfigstudio PORTVERSION= 1.2.2 DISTVERSIONPREFIX=v -PORTREVISION= 17 +PORTREVISION= 18 CATEGORIES= graphics multimedia MAINTAINER= portmaster@BSDforge.com diff --git a/graphics/tinyows/Makefile b/graphics/tinyows/Makefile index dd9eee79ec6..63ba7d5a8ea 100644 --- a/graphics/tinyows/Makefile +++ b/graphics/tinyows/Makefile @@ -2,6 +2,7 @@ PORTNAME= tinyows PORTVERSION= 1.2.0 +PORTREVISION= 1 CATEGORIES= graphics www geography MASTER_SITES= https://download.osgeo.org/mapserver/ diff --git 
a/graphics/vips/Makefile b/graphics/vips/Makefile index d8cdc575c9e..9918e9b6f1b 100644 --- a/graphics/vips/Makefile +++ b/graphics/vips/Makefile @@ -2,7 +2,7 @@ PORTNAME= vips PORTVERSION= 8.12.2 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= graphics MASTER_SITES= https://github.com/libvips/libvips/releases/download/v${PORTVERSION}/ diff --git a/graphics/waffle/Makefile b/graphics/waffle/Makefile index 5127fe52989..5846c5e09ba 100644 --- a/graphics/waffle/Makefile +++ b/graphics/waffle/Makefile @@ -2,7 +2,7 @@ PORTNAME= waffle DISTVERSION= 1.6.1-15 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= graphics # Wayland xdg-shell support (to run on sway/wayfire/etc. that have removed wl-shell) diff --git a/graphics/wayland/Makefile b/graphics/wayland/Makefile index 48eb4fcfb3c..1f4a415f5c8 100644 --- a/graphics/wayland/Makefile +++ b/graphics/wayland/Makefile @@ -2,6 +2,7 @@ PORTNAME= wayland DISTVERSION= 1.20.0 +PORTREVISION= 1 CATEGORIES= graphics wayland MASTER_SITES= https://wayland.freedesktop.org/releases/ diff --git a/graphics/webp-pixbuf-loader/pkg-plist b/graphics/webp-pixbuf-loader/pkg-plist index 42b7ba98ad0..8f9f1875cd6 100644 --- a/graphics/webp-pixbuf-loader/pkg-plist +++ b/graphics/webp-pixbuf-loader/pkg-plist @@ -1,4 +1,2 @@ lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders/libpixbufloader-webp.so share/thumbnailers/webp-pixbuf.thumbnailer -@postexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true -@postunexec %D/bin/gdk-pixbuf-query-loaders > /dev/null 2>&1 && %D/bin/gdk-pixbuf-query-loaders > %D/lib/gdk-pixbuf-2.0/%%GTK2_VERSION%%/loaders.cache 2>/dev/null || /usr/bin/true diff --git a/graphics/xournal/Makefile b/graphics/xournal/Makefile index 9bb8fc9a3c9..7d981a354c7 100644 --- a/graphics/xournal/Makefile +++ b/graphics/xournal/Makefile @@ -2,7 +2,7 @@ PORTNAME= xournal PORTVERSION= 0.4.8.2016 -PORTREVISION= 32 +PORTREVISION= 33 CATEGORIES= 
graphics MASTER_SITES= SF diff --git a/graphics/xournalpp/Makefile b/graphics/xournalpp/Makefile index dab1c7f0fed..f2b2ecbc44c 100644 --- a/graphics/xournalpp/Makefile +++ b/graphics/xournalpp/Makefile @@ -1,6 +1,7 @@ PORTNAME= xournalpp DISTVERSIONPREFIX= v DISTVERSION= 1.1.1 +PORTREVISION= 1 CATEGORIES= graphics MAINTAINER= yuri@FreeBSD.org diff --git a/graphics/yafaray/Makefile b/graphics/yafaray/Makefile index 47ac983b438..80e7eacad3d 100644 --- a/graphics/yafaray/Makefile +++ b/graphics/yafaray/Makefile @@ -3,7 +3,7 @@ PORTNAME= yafaray DISTVERSIONPREFIX= v DISTVERSION= 3.5.1 -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= graphics MAINTAINER= yuri@FreeBSD.org diff --git a/irc/hexchat/Makefile b/irc/hexchat/Makefile index cd79aa5b1ea..71adf0a325b 100644 --- a/irc/hexchat/Makefile +++ b/irc/hexchat/Makefile @@ -2,6 +2,7 @@ PORTNAME= hexchat DISTVERSION= 2.16.1 +PORTREVISION= 1 CATEGORIES= irc gnome MASTER_SITES= https://dl.hexchat.net/hexchat/ diff --git a/irc/weechat/Makefile b/irc/weechat/Makefile index be2a28d53df..5408413a469 100644 --- a/irc/weechat/Makefile +++ b/irc/weechat/Makefile @@ -1,7 +1,7 @@ # Created by: clement PORTNAME= weechat -DISTVERSION= 3.4.1 +DISTVERSION= 3.5 CATEGORIES= irc MASTER_SITES= https://weechat.org/files/src/ diff --git a/irc/weechat/distinfo b/irc/weechat/distinfo index 7909ebba6cc..aa39c180cab 100644 --- a/irc/weechat/distinfo +++ b/irc/weechat/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647460117 -SHA256 (weechat-3.4.1.tar.xz) = 7e088109ad5dfbcb08a9a6b1dd70ea8236093fed8a13ee9d9c98881d7b1aeae7 -SIZE (weechat-3.4.1.tar.xz) = 2617856 +TIMESTAMP = 1648391311 +SHA256 (weechat-3.5.tar.xz) = ea904e4cec8edd0bd24f3ea17f6d6dff97ca00ee0571ee972e79e54c8c08170c +SIZE (weechat-3.5.tar.xz) = 2693072 diff --git a/japanese/im-ja/Makefile b/japanese/im-ja/Makefile index 24bdbedc6aa..05c6f56b80e 100644 --- a/japanese/im-ja/Makefile +++ b/japanese/im-ja/Makefile @@ -2,7 +2,7 @@ PORTNAME= im-ja PORTVERSION= 1.5 -PORTREVISION= 13 +PORTREVISION= 14 
CATEGORIES= japanese x11-toolkits MASTER_SITES= http://im-ja.sourceforge.net/ diff --git a/java/openjfx14/Makefile b/java/openjfx14/Makefile index 72220ce0b5c..94a35c0cf3c 100644 --- a/java/openjfx14/Makefile +++ b/java/openjfx14/Makefile @@ -1,6 +1,6 @@ PORTNAME= openjfx DISTVERSION= 14.0.2.1+1 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= java x11-toolkits devel MASTER_SITES= https://repo.maven.apache.org/maven2/org/apache/lucene/lucene-core/7.7.1/:core \ https://repo.maven.apache.org/maven2/org/apache/lucene/lucene-grouping/7.7.1/:grouping \ diff --git a/lang/cling/Makefile b/lang/cling/Makefile index 318bfd95682..aee14cd4f88 100644 --- a/lang/cling/Makefile +++ b/lang/cling/Makefile @@ -2,7 +2,7 @@ PORTNAME= cling DISTVERSION= 0.5-2018-08-13 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= lang devel MASTER_SITES= https://root.cern.ch/download/cling/ DISTNAME= ${PORTNAME}_${DISTVERSION:C/^[0-9]\.[0-9]-//}_sources diff --git a/lang/crystal/Makefile b/lang/crystal/Makefile index 46c942add54..be4aa0c5328 100644 --- a/lang/crystal/Makefile +++ b/lang/crystal/Makefile @@ -1,5 +1,6 @@ PORTNAME= crystal DISTVERSION= 1.2.2 +PORTREVISION= 1 CATEGORIES= lang MASTER_SITES= https://dl.unrelenting.technology/crystal/:bootstrap DISTFILES= ${BOOTSTRAP_PATH}:bootstrap @@ -22,7 +23,7 @@ BUILD_DEPENDS= ${LOCALBASE}/bin/llvm-config${LLVM_VERSION}:devel/llvm${BOOTSTRAP git:devel/git RUN_DEPENDS= pkg-config:devel/pkgconf -USES= compiler gmake pkgconfig +USES= compiler gmake pkgconfig ssl USE_GITHUB= yes GH_ACCOUNT= crystal-lang @@ -52,6 +53,15 @@ OPTIONS_SUB= yes IGNORE= not supported on anything but FreeBSD .endif +# crystal needs pkgconfig to determine SSL variants but these are not +# present in FreeBSD base. We splice in the correct info based on +# FreeBSD version and hope it doesn't change during minor releases. +# See post-patch for the substitutions of these patches. 
+.if ${SSL_DEFAULT} == "base" +EXTRA_PATCHES= ${PATCHDIR}/extra-patch-src_openssl_lib__crypto.cr \ + ${PATCHDIR}/extra-patch-src_openssl_lib__ssl.cr +.endif + post-extract: ${MKDIR} ${WRKSRC}/.build ${CP} ${DISTDIR}/${DIST_SUBDIR}/${BOOTSTRAP_PATH} ${WRKSRC}/.build/crystal @@ -59,6 +69,15 @@ post-extract: ${LN} -s x86_64-freebsd ${WRKSRC}/src/lib_c/aarch64-portbld-freebsd ${LN} -s x86_64-freebsd ${WRKSRC}/src/lib_c/aarch64-freebsd +post-patch: +.if ${SSL_DEFAULT} == "base" && ${OSREL:R} < 15 + @${REINPLACE_CMD} \ + -e 's|CRYSTAL_SSL_VERSION|1.1.1|g' \ + -e 's|CRYSTAL_SSL_LDFLAGS|${OPENSSLLIB}|g' \ + ${WRKSRC}/src/openssl/lib_ssl.cr \ + ${WRKSRC}/src/openssl/lib_crypto.cr +.endif + do-build-DOCS-on: cd ${WRKSRC} && ${SETENV} ${MAKE_ENV} ${MAKE_CMD} ${MAKE_ARGS} docs \ CRYSTAL_CACHE_DIR="${WRKDIR}/cache" diff --git a/lang/crystal/files/extra-patch-src_openssl_lib__crypto.cr b/lang/crystal/files/extra-patch-src_openssl_lib__crypto.cr new file mode 100644 index 00000000000..d935d28e202 --- /dev/null +++ b/lang/crystal/files/extra-patch-src_openssl_lib__crypto.cr @@ -0,0 +1,27 @@ +--- src/openssl/lib_crypto.cr.orig 2021-08-08 15:06:11 UTC ++++ src/openssl/lib_crypto.cr +@@ -1,21 +1,11 @@ + {% begin %} + lib LibCrypto +- {% from_libressl = (`hash pkg-config 2> /dev/null || printf %s false` != "false") && +- (`test -f $(pkg-config --silence-errors --variable=includedir libcrypto)/openssl/opensslv.h || printf %s false` != "false") && +- (`printf "#include \nLIBRESSL_VERSION_NUMBER" | ${CC:-cc} $(pkg-config --cflags --silence-errors libcrypto || true) -E -`.chomp.split('\n').last != "LIBRESSL_VERSION_NUMBER") %} +- {% ssl_version = `hash pkg-config 2> /dev/null && pkg-config --silence-errors --modversion libcrypto || printf %s 0.0.0`.split.last.gsub(/[^0-9.]/, "") %} +- +- {% if from_libressl %} +- LIBRESSL_VERSION = {{ ssl_version }} +- OPENSSL_VERSION = "0.0.0" +- {% else %} +- LIBRESSL_VERSION = "0.0.0" +- OPENSSL_VERSION = {{ ssl_version }} +- {% end %} ++ 
LIBRESSL_VERSION = "0.0.0" ++ OPENSSL_VERSION = "CRYSTAL_SSL_VERSION" + end + {% end %} + +-@[Link(ldflags: "`command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'`")] ++@[Link(ldflags: "-LCRYSTAL_SSL_LDFLAGS -lcrypto")] + lib LibCrypto + alias Char = LibC::Char + alias Int = LibC::Int diff --git a/lang/crystal/files/extra-patch-src_openssl_lib__ssl.cr b/lang/crystal/files/extra-patch-src_openssl_lib__ssl.cr new file mode 100644 index 00000000000..54181f2f4b9 --- /dev/null +++ b/lang/crystal/files/extra-patch-src_openssl_lib__ssl.cr @@ -0,0 +1,28 @@ +--- src/openssl/lib_ssl.cr.orig 2021-08-08 15:06:09 UTC ++++ src/openssl/lib_ssl.cr +@@ -6,22 +6,12 @@ require "./lib_crypto" + + {% begin %} + lib LibSSL +- {% from_libressl = (`hash pkg-config 2> /dev/null || printf %s false` != "false") && +- (`test -f $(pkg-config --silence-errors --variable=includedir libssl)/openssl/opensslv.h || printf %s false` != "false") && +- (`printf "#include \nLIBRESSL_VERSION_NUMBER" | ${CC:-cc} $(pkg-config --cflags --silence-errors libssl || true) -E -`.chomp.split('\n').last != "LIBRESSL_VERSION_NUMBER") %} +- {% ssl_version = `hash pkg-config 2> /dev/null && pkg-config --silence-errors --modversion libssl || printf %s 0.0.0`.split.last.gsub(/[^0-9.]/, "") %} +- +- {% if from_libressl %} +- LIBRESSL_VERSION = {{ ssl_version }} +- OPENSSL_VERSION = "0.0.0" +- {% else %} +- LIBRESSL_VERSION = "0.0.0" +- OPENSSL_VERSION = {{ ssl_version }} +- {% end %} ++ LIBRESSL_VERSION = "0.0.0" ++ OPENSSL_VERSION = "CRYSTAL_SSL_VERSION" + end + {% end %} + +-@[Link(ldflags: "`command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'`")] ++@[Link(ldflags: "-LCRYSTAL_SSL_LDFLAGS -lssl")] + lib LibSSL + alias Int = LibC::Int + alias Char = LibC::Char diff --git a/lang/ferite/Makefile b/lang/ferite/Makefile index d4849baef40..724511e232a 100644 --- a/lang/ferite/Makefile +++ b/lang/ferite/Makefile @@ 
-2,7 +2,7 @@ PORTNAME= ferite PORTVERSION= 1.0.2 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= lang MASTER_SITES= SF diff --git a/lang/gnustep-base/Makefile b/lang/gnustep-base/Makefile index 22de6580910..4a7f3970fb3 100644 --- a/lang/gnustep-base/Makefile +++ b/lang/gnustep-base/Makefile @@ -3,7 +3,7 @@ PORTNAME= gnustep-base DISTVERSIONPREFIX= base- DISTVERSION= 1_27_0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= lang devel gnustep MAINTAINER= ports@FreeBSD.org diff --git a/lang/mono/Makefile b/lang/mono/Makefile index 7003f645a52..227a8cf9d40 100644 --- a/lang/mono/Makefile +++ b/lang/mono/Makefile @@ -66,6 +66,7 @@ OPTIONS_SLAVE= MONOLITE .include .if ${ARCH} == aarch64 +EXTRA_PATCHES= ${FILESDIR}/extra-patch-aarch64-race-workaround PLIST= ${.CURDIR}/pkg-plist.aarch64 .endif diff --git a/lang/mono/files/extra-patch-aarch64-race-workaround b/lang/mono/files/extra-patch-aarch64-race-workaround new file mode 100644 index 00000000000..d7dd165bd60 --- /dev/null +++ b/lang/mono/files/extra-patch-aarch64-race-workaround @@ -0,0 +1,26 @@ +https://bugs.freebsd.org/bugzilla/attachment.cgi?id=195063&action=diff#i/lang/mono/files/extra-patch-aarch64-race-workaround_sec1 + +Workaround for Roslyn crash: https://github.com/mono/mono/issues/7017 + +--- mcs/build/profiles/basic.make.orig 2021-11-08 19:42:34.845483000 +0100 ++++ mcs/build/profiles/basic.make 2021-11-08 19:43:00.996003000 +0100 +@@ -47,7 +47,7 @@ LIBRARY_COMPILE = $(BOOT_COMPILE) + # + # Copy from rules.make because I don't know how to unset MCS_FLAGS + # +-USE_MCS_FLAGS = /codepage:$(CODEPAGE) /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) $(PROFILE_MCS_FLAGS) $(MCS_FLAGS) ++USE_MCS_FLAGS = /codepage:$(CODEPAGE) /parallel- /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) $(PROFILE_MCS_FLAGS) $(MCS_FLAGS) + + .PHONY: profile-check do-profile-check + profile-check: +--- mcs/build/rules.make.orig 2021-11-08 19:43:05.777570000 +0100 ++++ mcs/build/rules.make 
2021-11-08 19:43:20.886314000 +0100 +@@ -34,7 +34,7 @@ BUILD_TOOLS_PROFILE = build + endif + endif + +-USE_MCS_FLAGS = /codepage:$(CODEPAGE) /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) $(PROFILE_MCS_FLAGS) $(MCS_FLAGS) ++USE_MCS_FLAGS = /codepage:$(CODEPAGE) /parallel- /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) $(PROFILE_MCS_FLAGS) $(MCS_FLAGS) + USE_MBAS_FLAGS = /codepage:$(CODEPAGE) $(LOCAL_MBAS_FLAGS) $(PLATFORM_MBAS_FLAGS) $(PROFILE_MBAS_FLAGS) $(MBAS_FLAGS) + USE_CFLAGS = $(LOCAL_CFLAGS) $(CFLAGS) $(CPPFLAGS) + CSCOMPILE = $(Q_MCS) $(MCS) $(USE_MCS_FLAGS) diff --git a/lang/mono5.10/Makefile b/lang/mono5.10/Makefile index 968a3b9107f..d17bded46c6 100644 --- a/lang/mono5.10/Makefile +++ b/lang/mono5.10/Makefile @@ -67,6 +67,7 @@ OPTIONS_SLAVE= MONOLITE .include .if ${ARCH} == aarch64 +EXTRA_PATCHES= ${FILESDIR}/extra-patch-aarch64-race-workaround PLIST= ${.CURDIR}/pkg-plist.aarch64 .endif diff --git a/lang/mono5.10/files/extra-patch-aarch64-race-workaround b/lang/mono5.10/files/extra-patch-aarch64-race-workaround new file mode 100644 index 00000000000..d7dd165bd60 --- /dev/null +++ b/lang/mono5.10/files/extra-patch-aarch64-race-workaround @@ -0,0 +1,26 @@ +https://bugs.freebsd.org/bugzilla/attachment.cgi?id=195063&action=diff#i/lang/mono/files/extra-patch-aarch64-race-workaround_sec1 + +Workaround for Roslyn crash: https://github.com/mono/mono/issues/7017 + +--- mcs/build/profiles/basic.make.orig 2021-11-08 19:42:34.845483000 +0100 ++++ mcs/build/profiles/basic.make 2021-11-08 19:43:00.996003000 +0100 +@@ -47,7 +47,7 @@ LIBRARY_COMPILE = $(BOOT_COMPILE) + # + # Copy from rules.make because I don't know how to unset MCS_FLAGS + # +-USE_MCS_FLAGS = /codepage:$(CODEPAGE) /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) $(PROFILE_MCS_FLAGS) $(MCS_FLAGS) ++USE_MCS_FLAGS = /codepage:$(CODEPAGE) /parallel- /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) 
$(PROFILE_MCS_FLAGS) $(MCS_FLAGS) + + .PHONY: profile-check do-profile-check + profile-check: +--- mcs/build/rules.make.orig 2021-11-08 19:43:05.777570000 +0100 ++++ mcs/build/rules.make 2021-11-08 19:43:20.886314000 +0100 +@@ -34,7 +34,7 @@ BUILD_TOOLS_PROFILE = build + endif + endif + +-USE_MCS_FLAGS = /codepage:$(CODEPAGE) /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) $(PROFILE_MCS_FLAGS) $(MCS_FLAGS) ++USE_MCS_FLAGS = /codepage:$(CODEPAGE) /parallel- /nologo /noconfig /deterministic $(LOCAL_MCS_FLAGS) $(PLATFORM_MCS_FLAGS) $(PROFILE_MCS_FLAGS) $(MCS_FLAGS) + USE_MBAS_FLAGS = /codepage:$(CODEPAGE) $(LOCAL_MBAS_FLAGS) $(PLATFORM_MBAS_FLAGS) $(PROFILE_MBAS_FLAGS) $(MBAS_FLAGS) + USE_CFLAGS = $(LOCAL_CFLAGS) $(CFLAGS) $(CPPFLAGS) + CSCOMPILE = $(Q_MCS) $(MCS) $(USE_MCS_FLAGS) diff --git a/lang/njs/Makefile b/lang/njs/Makefile index 53529f671ec..ca2f303d685 100644 --- a/lang/njs/Makefile +++ b/lang/njs/Makefile @@ -13,7 +13,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE USES= cpe -CPE_VENDOR= nginx +CPE_VENDOR= f5 USE_GITHUB= yes GH_TUPLE= nginx:njs:0.7.2 diff --git a/lang/perl5-devel/Makefile b/lang/perl5-devel/Makefile index e91255e5cd9..f083c8db3ad 100644 --- a/lang/perl5-devel/Makefile +++ b/lang/perl5-devel/Makefile @@ -31,7 +31,7 @@ USES= cpe USE_GITHUB= yes GH_ACCOUNT= Perl GH_PROJECT= perl5 -GH_TAGNAME= v5.35.10-15-g7ddf4b5513 +GH_TAGNAME= v5.35.10-22-gf97596086d # Give a hint of where libperl.so can be found. 
USE_LDCONFIG= ${PREFIX}/${_ARCH_LIB}/CORE diff --git a/lang/perl5-devel/distinfo b/lang/perl5-devel/distinfo index e3829876ad3..52ca60289f2 100644 --- a/lang/perl5-devel/distinfo +++ b/lang/perl5-devel/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1648137117 -SHA256 (perl/Perl-perl5-v5.35.10-15-g7ddf4b5513_GH0.tar.gz) = 6bd1a0e68d0b52208dbaab7cdbc2f7b925f792d36d363f1898cde33fdd78e6ed -SIZE (perl/Perl-perl5-v5.35.10-15-g7ddf4b5513_GH0.tar.gz) = 20109160 +TIMESTAMP = 1648456624 +SHA256 (perl/Perl-perl5-v5.35.10-22-gf97596086d_GH0.tar.gz) = f006d668247bdb969ab3fa24787b160bf0dcee9d764e085fa32530a76f8b73d4 +SIZE (perl/Perl-perl5-v5.35.10-22-gf97596086d_GH0.tar.gz) = 20109856 diff --git a/lang/php80/Makefile b/lang/php80/Makefile index abccb8e1b2e..61b7834559c 100644 --- a/lang/php80/Makefile +++ b/lang/php80/Makefile @@ -1,6 +1,6 @@ PORTNAME= php80 DISTVERSION= 8.0.17 -PORTREVISION?= 0 +PORTREVISION?= 2 CATEGORIES?= lang devel www MASTER_SITES= PHP DISTNAME= php-${DISTVERSION} diff --git a/lang/php80/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h b/lang/php80/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h new file mode 100644 index 00000000000..9a75bc3e86c --- /dev/null +++ b/lang/php80/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h @@ -0,0 +1,11 @@ +--- ext/pcre/pcre2lib/sljit/sljitConfigInternal.h.orig 2022-03-25 09:54:56 UTC ++++ ext/pcre/pcre2lib/sljit/sljitConfigInternal.h +@@ -283,7 +283,7 @@ + /* Instruction cache flush. 
*/ + /****************************/ + +-#if (!defined SLJIT_CACHE_FLUSH && defined __has_builtin) ++#if (!defined SLJIT_CACHE_FLUSH && defined __has_builtin && !defined SLJIT_CONFIG_PPC_32) + #if __has_builtin(__builtin___clear_cache) + + #define SLJIT_CACHE_FLUSH(from, to) \ diff --git a/lang/php81/Makefile b/lang/php81/Makefile index 670df40197c..bd37b77bb08 100644 --- a/lang/php81/Makefile +++ b/lang/php81/Makefile @@ -1,6 +1,6 @@ PORTNAME= php81 DISTVERSION= 8.1.4 -PORTREVISION?= 0 +PORTREVISION?= 2 CATEGORIES?= lang devel www MASTER_SITES= PHP/distributions DISTNAME= php-${DISTVERSION} diff --git a/lang/php81/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h b/lang/php81/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h new file mode 100644 index 00000000000..9a75bc3e86c --- /dev/null +++ b/lang/php81/files/patch-ext_pcre_pcre2lib_sljit_sljitConfigInternal.h @@ -0,0 +1,11 @@ +--- ext/pcre/pcre2lib/sljit/sljitConfigInternal.h.orig 2022-03-25 09:54:56 UTC ++++ ext/pcre/pcre2lib/sljit/sljitConfigInternal.h +@@ -283,7 +283,7 @@ + /* Instruction cache flush. 
*/ + /****************************/ + +-#if (!defined SLJIT_CACHE_FLUSH && defined __has_builtin) ++#if (!defined SLJIT_CACHE_FLUSH && defined __has_builtin && !defined SLJIT_CONFIG_PPC_32) + #if __has_builtin(__builtin___clear_cache) + + #define SLJIT_CACHE_FLUSH(from, to) \ diff --git a/lang/py-lupa/Makefile b/lang/py-lupa/Makefile index 5d45fdb2205..cc4c7c5d154 100644 --- a/lang/py-lupa/Makefile +++ b/lang/py-lupa/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= lupa -PORTVERSION= 1.12 +PORTVERSION= 1.13 CATEGORIES= lang python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/lang/py-lupa/distinfo b/lang/py-lupa/distinfo index a15c5ca2a3c..e8ca05d66c3 100644 --- a/lang/py-lupa/distinfo +++ b/lang/py-lupa/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058062 -SHA256 (lupa-1.12.tar.gz) = 142957755e00849c102586b77fc31b21bfa0a73589fad4cf5b9feb866bcb493a -SIZE (lupa-1.12.tar.gz) = 275772 +TIMESTAMP = 1647264600 +SHA256 (lupa-1.13.tar.gz) = e1d94ac2a630d271027dac2c21d1428771d9ea9d4d88f15f20a7781340f02a4e +SIZE (lupa-1.13.tar.gz) = 1059981 diff --git a/lang/v8-beta/Makefile b/lang/v8-beta/Makefile index 5003b38f138..4708af9128e 100644 --- a/lang/v8-beta/Makefile +++ b/lang/v8-beta/Makefile @@ -1,8 +1,8 @@ # We follow the beta channel on https://omahaproxy.appspot.com/ -# https://chromium.googlesource.com/v8/v8/+/refs/heads/9.9-lkgr +# https://chromium.googlesource.com/v8/v8/+/refs/heads/10.0-lkgr PORTNAME= v8 -PORTVERSION= 9.9.115.5 +PORTVERSION= 10.0.139.6 CATEGORIES= lang MASTER_SITES= LOCAL/sunpoet/v8/${PORTVERSION:R:R} PKGNAMESUFFIX= -beta @@ -44,13 +44,13 @@ CPE_VENDOR= google CONFLICTS_INSTALL= v8 -BUILD_REV= f3be6e847f004923466a350274aae3fbf048c19c -BUILDTOOLS_REV= f78b4b9f33bd8ef9944d5ce643daff1c31880189 -CLANG_REV= df50898d39a35c2467fa89f8d7a6eedac1fd6aa4 -COMMON_REV= 7f36dbc19d31e2aad895c60261ca8f726442bfbb -GOOGLETEST_REV= c9643a2e45ed0cb18b8409c62994be7c6e66dc1b -ICU_REV= b9f6d0a5c5375dc4643f35360d257dba37c1d3e1 
-ZLIB_REV= fc5cfd78a357d5bb7735a58f383634faaafe706a +BUILD_REV= 62a6377648eb82cff75e3a12f689400694fbbb63 +BUILDTOOLS_REV= 169eef5b952dea9b285bf1a0a7aff7518a3ed907 +CLANG_REV= 62e2cd966f93a27aab0953ce7c2d83ffd6bdfdcc +COMMON_REV= d115b033c4e53666b535cbd1985ffe60badad082 +GOOGLETEST_REV= ea55f1f52c489535f0d3b583c81529762c9cb5ea +ICU_REV= b867f209e4b56b0a8c01aaaba3882ad41e438c4f +ZLIB_REV= 9538f4194f6e5eff1bd59f2396ed9d05b1a8d801 # Run "gn args --list out/Release" for all variables. # Some parts don't have use_system_* flag, and can be turned on/off by using diff --git a/lang/v8-beta/distinfo b/lang/v8-beta/distinfo index f8675d48428..ef6336ade4e 100644 --- a/lang/v8-beta/distinfo +++ b/lang/v8-beta/distinfo @@ -1,17 +1,17 @@ -TIMESTAMP = 1643970996 -SHA256 (v8/9.9/v8-9.9.115.5.tar.gz) = 0aeb6c06f9afb057e25f05548f15da09992d6d60e88a51450255bca972c58a8c -SIZE (v8/9.9/v8-9.9.115.5.tar.gz) = 27461932 -SHA256 (v8/9.9/build-f3be6e847f004923466a350274aae3fbf048c19c.tar.gz) = be1ddfbbfbfff91dd04f03305683d1d14a9adab30116bc1da4c787be348d3371 -SIZE (v8/9.9/build-f3be6e847f004923466a350274aae3fbf048c19c.tar.gz) = 1409368 -SHA256 (v8/9.9/buildtools-f78b4b9f33bd8ef9944d5ce643daff1c31880189.tar.gz) = b448d95b765fc43c4009572226fd94eb6803d4da1c551ecac2961ac0cb764479 -SIZE (v8/9.9/buildtools-f78b4b9f33bd8ef9944d5ce643daff1c31880189.tar.gz) = 87332 -SHA256 (v8/9.9/clang-df50898d39a35c2467fa89f8d7a6eedac1fd6aa4.tar.gz) = 29abd3fc5d90da43a16977ed6ae8c42ac2cecfe8a733e5d801df12cb387ead40 -SIZE (v8/9.9/clang-df50898d39a35c2467fa89f8d7a6eedac1fd6aa4.tar.gz) = 298198 -SHA256 (v8/9.9/common-7f36dbc19d31e2aad895c60261ca8f726442bfbb.tar.gz) = dabc8a81795c9a29858d643de59f6a9ad3f2208b775fd294106da4f8ca0215d0 -SIZE (v8/9.9/common-7f36dbc19d31e2aad895c60261ca8f726442bfbb.tar.gz) = 10492 -SHA256 (v8/9.9/googletest-c9643a2e45ed0cb18b8409c62994be7c6e66dc1b.tar.gz) = 8749e8686c3f015f5ac19ba2e3055d13a811b665d45239aadb814a7d86efb057 -SIZE (v8/9.9/googletest-c9643a2e45ed0cb18b8409c62994be7c6e66dc1b.tar.gz) 
= 832987 -SHA256 (v8/9.9/icu-b9f6d0a5c5375dc4643f35360d257dba37c1d3e1.tar.gz) = 4acb63cd6c4a297cf99c47d7af82886bb55403f570945f68c4c6e3c2cdae4318 -SIZE (v8/9.9/icu-b9f6d0a5c5375dc4643f35360d257dba37c1d3e1.tar.gz) = 52291969 -SHA256 (v8/9.9/zlib-fc5cfd78a357d5bb7735a58f383634faaafe706a.tar.gz) = 6580c08233e7d3e3e82d2087ecb2069a12180f4dca217f6fdbcec76581b7dbbf -SIZE (v8/9.9/zlib-fc5cfd78a357d5bb7735a58f383634faaafe706a.tar.gz) = 314407 +TIMESTAMP = 1647264438 +SHA256 (v8/10.0/v8-10.0.139.6.tar.gz) = 80378a7242c0dbac5273884f0d6a209eef3f0e100bf460b862a8a25acb7b3b79 +SIZE (v8/10.0/v8-10.0.139.6.tar.gz) = 27533786 +SHA256 (v8/10.0/build-62a6377648eb82cff75e3a12f689400694fbbb63.tar.gz) = a40bd84480727c8679658f8603c27ba2745c3e3d81feadd2a63face077aec958 +SIZE (v8/10.0/build-62a6377648eb82cff75e3a12f689400694fbbb63.tar.gz) = 1406814 +SHA256 (v8/10.0/buildtools-169eef5b952dea9b285bf1a0a7aff7518a3ed907.tar.gz) = 4bcd975013578a719ef010e090a605e0fc167812b1257e7c97a468a23402edcd +SIZE (v8/10.0/buildtools-169eef5b952dea9b285bf1a0a7aff7518a3ed907.tar.gz) = 86888 +SHA256 (v8/10.0/clang-62e2cd966f93a27aab0953ce7c2d83ffd6bdfdcc.tar.gz) = 73f0ccf32390764b1c463f05d75b340174fefddd0b5f8a920bacc4952673be0a +SIZE (v8/10.0/clang-62e2cd966f93a27aab0953ce7c2d83ffd6bdfdcc.tar.gz) = 300831 +SHA256 (v8/10.0/common-d115b033c4e53666b535cbd1985ffe60badad082.tar.gz) = 568c091ad5ce5291f4bae7e86a25622aa7d6dcfe4efb0b20d0771557a0c57eea +SIZE (v8/10.0/common-d115b033c4e53666b535cbd1985ffe60badad082.tar.gz) = 10496 +SHA256 (v8/10.0/googletest-ea55f1f52c489535f0d3b583c81529762c9cb5ea.tar.gz) = 0f89aad1ce740025f9175b7c188f3e00be982fe3671b02c937dba47e60a0a088 +SIZE (v8/10.0/googletest-ea55f1f52c489535f0d3b583c81529762c9cb5ea.tar.gz) = 834690 +SHA256 (v8/10.0/icu-b867f209e4b56b0a8c01aaaba3882ad41e438c4f.tar.gz) = e07eb4ce9dbdc85774a7bcbc1fe4c5c8f96429fb51be037b51fa46463d992420 +SIZE (v8/10.0/icu-b867f209e4b56b0a8c01aaaba3882ad41e438c4f.tar.gz) = 52380232 +SHA256 
(v8/10.0/zlib-9538f4194f6e5eff1bd59f2396ed9d05b1a8d801.tar.gz) = 49ccb41bd1ce2cd94de8d51029ab4bece040ab3385f542a0068ede86ff2c4b84 +SIZE (v8/10.0/zlib-9538f4194f6e5eff1bd59f2396ed9d05b1a8d801.tar.gz) = 318008 diff --git a/lang/v8/Makefile b/lang/v8/Makefile index 901368155f5..6e99466c87c 100644 --- a/lang/v8/Makefile +++ b/lang/v8/Makefile @@ -1,8 +1,8 @@ # We follow the stable channel on https://omahaproxy.appspot.com/ -# https://chromium.googlesource.com/v8/v8/+/refs/heads/9.8-lkgr +# https://chromium.googlesource.com/v8/v8/+/refs/heads/9.9-lkgr PORTNAME= v8 -PORTVERSION= 9.8.177.9 +PORTVERSION= 9.9.115.8 CATEGORIES= lang MASTER_SITES= LOCAL/sunpoet/v8/${PORTVERSION:R:R} DISTFILES= v8-${PORTVERSION}.tar.gz \ @@ -43,13 +43,13 @@ CPE_VENDOR= google CONFLICTS_INSTALL= v8-beta -BUILD_REV= 9cfc74504f0c5093fe6799e70f15bded2423b5b4 -BUILDTOOLS_REV= 075dd7e22837a69189003e4fa84499acf63188cf -CLANG_REV= 336fcfd099995c128bc93e97b8263cc6fc891cc8 +BUILD_REV= f3be6e847f004923466a350274aae3fbf048c19c +BUILDTOOLS_REV= f78b4b9f33bd8ef9944d5ce643daff1c31880189 +CLANG_REV= df50898d39a35c2467fa89f8d7a6eedac1fd6aa4 COMMON_REV= 7f36dbc19d31e2aad895c60261ca8f726442bfbb -GOOGLETEST_REV= 4c5650f68866e3c2e60361d5c4c95c6f335fb64b -ICU_REV= edf883ad2db9c723b058a6a17a146d68d6343143 -ZLIB_REV= efd9399ae01364926be2a38946127fdf463480db +GOOGLETEST_REV= c9643a2e45ed0cb18b8409c62994be7c6e66dc1b +ICU_REV= b9f6d0a5c5375dc4643f35360d257dba37c1d3e1 +ZLIB_REV= fc5cfd78a357d5bb7735a58f383634faaafe706a # Run "gn args --list out/Release" for all variables. 
# Some parts don't have use_system_* flag, and can be turned on/off by using diff --git a/lang/v8/distinfo b/lang/v8/distinfo index afed509bdbb..f65717a591b 100644 --- a/lang/v8/distinfo +++ b/lang/v8/distinfo @@ -1,17 +1,17 @@ -TIMESTAMP = 1643970994 -SHA256 (v8/9.8/v8-9.8.177.9.tar.gz) = 98cbbad48c1ac97871304a50220e41a18e632dd415b072bc82f9f6178059505d -SIZE (v8/9.8/v8-9.8.177.9.tar.gz) = 27365748 -SHA256 (v8/9.8/build-9cfc74504f0c5093fe6799e70f15bded2423b5b4.tar.gz) = 15703b71589b1d0bf6ee666d62b413ff0ac3b29448a8396e22994d6b7e6ed037 -SIZE (v8/9.8/build-9cfc74504f0c5093fe6799e70f15bded2423b5b4.tar.gz) = 1380406 -SHA256 (v8/9.8/buildtools-075dd7e22837a69189003e4fa84499acf63188cf.tar.gz) = 952ff349a2cbda6ecab58458a9c473c68acd8ae71386f0304c3c877f42bf5468 -SIZE (v8/9.8/buildtools-075dd7e22837a69189003e4fa84499acf63188cf.tar.gz) = 87271 -SHA256 (v8/9.8/clang-336fcfd099995c128bc93e97b8263cc6fc891cc8.tar.gz) = b284869f82815c194926f2fec6fbccbce5b48ae43720173320098d3a49652a0e -SIZE (v8/9.8/clang-336fcfd099995c128bc93e97b8263cc6fc891cc8.tar.gz) = 292729 -SHA256 (v8/9.8/common-7f36dbc19d31e2aad895c60261ca8f726442bfbb.tar.gz) = 5757e6ba719c12168b8787543eb2c9d08630435daa02a04e124ab349ed53bfaa -SIZE (v8/9.8/common-7f36dbc19d31e2aad895c60261ca8f726442bfbb.tar.gz) = 10490 -SHA256 (v8/9.8/googletest-4c5650f68866e3c2e60361d5c4c95c6f335fb64b.tar.gz) = 42cc6bf8f7b502a72af135b10afdae3600e54d29f5a2ec7fe37840629a18dbb5 -SIZE (v8/9.8/googletest-4c5650f68866e3c2e60361d5c4c95c6f335fb64b.tar.gz) = 832346 -SHA256 (v8/9.8/icu-edf883ad2db9c723b058a6a17a146d68d6343143.tar.gz) = d78b88d872c31b88c79eaaa80e96dd7692edde65bf6d36f57dc59af8bc5fcdb5 -SIZE (v8/9.8/icu-edf883ad2db9c723b058a6a17a146d68d6343143.tar.gz) = 51809103 -SHA256 (v8/9.8/zlib-efd9399ae01364926be2a38946127fdf463480db.tar.gz) = edb4335e9be454c741b3bdc2af46955fb37f937b7b40f534b55e4b92c0819150 -SIZE (v8/9.8/zlib-efd9399ae01364926be2a38946127fdf463480db.tar.gz) = 313373 +TIMESTAMP = 1647264436 +SHA256 (v8/9.9/v8-9.9.115.8.tar.gz) = 
a3229e0b7e2ff873d021013eb58a0d7061902bf17be7fc4ccf9a5d224ba1d7ae +SIZE (v8/9.9/v8-9.9.115.8.tar.gz) = 27462135 +SHA256 (v8/9.9/build-f3be6e847f004923466a350274aae3fbf048c19c.tar.gz) = be1ddfbbfbfff91dd04f03305683d1d14a9adab30116bc1da4c787be348d3371 +SIZE (v8/9.9/build-f3be6e847f004923466a350274aae3fbf048c19c.tar.gz) = 1409368 +SHA256 (v8/9.9/buildtools-f78b4b9f33bd8ef9944d5ce643daff1c31880189.tar.gz) = b448d95b765fc43c4009572226fd94eb6803d4da1c551ecac2961ac0cb764479 +SIZE (v8/9.9/buildtools-f78b4b9f33bd8ef9944d5ce643daff1c31880189.tar.gz) = 87332 +SHA256 (v8/9.9/clang-df50898d39a35c2467fa89f8d7a6eedac1fd6aa4.tar.gz) = 29abd3fc5d90da43a16977ed6ae8c42ac2cecfe8a733e5d801df12cb387ead40 +SIZE (v8/9.9/clang-df50898d39a35c2467fa89f8d7a6eedac1fd6aa4.tar.gz) = 298198 +SHA256 (v8/9.9/common-7f36dbc19d31e2aad895c60261ca8f726442bfbb.tar.gz) = dabc8a81795c9a29858d643de59f6a9ad3f2208b775fd294106da4f8ca0215d0 +SIZE (v8/9.9/common-7f36dbc19d31e2aad895c60261ca8f726442bfbb.tar.gz) = 10492 +SHA256 (v8/9.9/googletest-c9643a2e45ed0cb18b8409c62994be7c6e66dc1b.tar.gz) = 8749e8686c3f015f5ac19ba2e3055d13a811b665d45239aadb814a7d86efb057 +SIZE (v8/9.9/googletest-c9643a2e45ed0cb18b8409c62994be7c6e66dc1b.tar.gz) = 832987 +SHA256 (v8/9.9/icu-b9f6d0a5c5375dc4643f35360d257dba37c1d3e1.tar.gz) = 4acb63cd6c4a297cf99c47d7af82886bb55403f570945f68c4c6e3c2cdae4318 +SIZE (v8/9.9/icu-b9f6d0a5c5375dc4643f35360d257dba37c1d3e1.tar.gz) = 52291969 +SHA256 (v8/9.9/zlib-fc5cfd78a357d5bb7735a58f383634faaafe706a.tar.gz) = 6580c08233e7d3e3e82d2087ecb2069a12180f4dca217f6fdbcec76581b7dbbf +SIZE (v8/9.9/zlib-fc5cfd78a357d5bb7735a58f383634faaafe706a.tar.gz) = 314407 diff --git a/lang/vala/Makefile b/lang/vala/Makefile index 5fc9584fee3..cce14d26a5c 100644 --- a/lang/vala/Makefile +++ b/lang/vala/Makefile @@ -2,6 +2,7 @@ PORTNAME= vala PORTVERSION= 0.48.23 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= lang gnome MASTER_SITES= GNOME diff --git a/mail/astroid/Makefile b/mail/astroid/Makefile index 3f2773f529e..fa211426f07 
100644 --- a/mail/astroid/Makefile +++ b/mail/astroid/Makefile @@ -1,7 +1,7 @@ PORTNAME= astroid DISTVERSIONPREFIX= v DISTVERSION= 0.15 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= mail MAINTAINER= mylan.connolly@gmail.com diff --git a/mail/balsa/Makefile b/mail/balsa/Makefile index 489e7784fb0..71560cc4fdc 100644 --- a/mail/balsa/Makefile +++ b/mail/balsa/Makefile @@ -3,7 +3,7 @@ PORTNAME= balsa PORTVERSION= 2.5.1 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= mail gnome MASTER_SITES= http://pawsa.fedorapeople.org/balsa/ diff --git a/mail/cone/Makefile b/mail/cone/Makefile index 31ba2181549..ad7ba221a0f 100644 --- a/mail/cone/Makefile +++ b/mail/cone/Makefile @@ -2,6 +2,7 @@ PORTNAME= cone PORTVERSION= 1.5 +PORTREVISION= 1 CATEGORIES= mail # This is for beta versions, leave it in please: #MASTER_SITES= http://www.courier-mta.org/beta/${PORTNAME}/ \ diff --git a/mail/dovecot-fts-xapian/Makefile b/mail/dovecot-fts-xapian/Makefile index ed04d5a3416..0756d855975 100644 --- a/mail/dovecot-fts-xapian/Makefile +++ b/mail/dovecot-fts-xapian/Makefile @@ -1,10 +1,10 @@ PORTNAME= fts-xapian -DISTVERSION= 1.5.1 -PORTREVISION= 1 +DISTVERSION= 1.5.4 +DISTVERSIONSUFFIX= b CATEGORIES= mail -MASTER_SITES= https://github.com/grosjo/fts-xapian/releases/download/${DISTVERSION}/ +MASTER_SITES= https://github.com/grosjo/fts-xapian/releases/download/${DISTVERSIONFULL}/ PKGNAMEPREFIX= dovecot- -DISTNAME= ${PKGNAMEPREFIX}${PORTNAME}-${DISTVERSION} +DISTNAME= ${PKGNAMEPREFIX}${PORTNAME}-${DISTVERSIONFULL} MAINTAINER= otis@FreeBSD.org COMMENT= Dovecot FTS plugin based on Xapian @@ -16,11 +16,11 @@ LIB_DEPENDS= libicuuc.so:devel/icu \ libxapian.so:databases/xapian-core RUN_DEPENDS= dovecot>=2.3.7:mail/dovecot -USES= autoreconf compiler:c++11-lang libtool pkgconfig +USES= autoreconf compiler:c++11-lang libtool pkgconfig sqlite GNU_CONFIGURE= yes -WRKSRC= ${WRKDIR}/${PORTNAME}-${DISTVERSION} +WRKSRC= ${WRKDIR}/${PORTNAME}-${DISTVERSIONFULL} PLIST_FILES= lib/dovecot/lib21_fts_xapian_plugin.a \ 
lib/dovecot/lib21_fts_xapian_plugin.so diff --git a/mail/dovecot-fts-xapian/distinfo b/mail/dovecot-fts-xapian/distinfo index e7e0d026258..b49a38017d7 100644 --- a/mail/dovecot-fts-xapian/distinfo +++ b/mail/dovecot-fts-xapian/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1636662908 -SHA256 (dovecot-fts-xapian-1.5.1.tar.gz) = 12b610492849440b96fc8527a98cd537fe14465c4e618eacfd9651101e343f2e -SIZE (dovecot-fts-xapian-1.5.1.tar.gz) = 2896670 +TIMESTAMP = 1648195324 +SHA256 (dovecot-fts-xapian-1.5.4b.tar.gz) = cfc60caade77aadc8441b971d2e49f7337033a1e83494163cd8abcd459c79f6d +SIZE (dovecot-fts-xapian-1.5.4b.tar.gz) = 33138 diff --git a/mail/dovecot-fts-xapian/files/patch-src_fts-backend-xapian.cpp b/mail/dovecot-fts-xapian/files/patch-src_fts-backend-xapian.cpp new file mode 100644 index 00000000000..bab0275d491 --- /dev/null +++ b/mail/dovecot-fts-xapian/files/patch-src_fts-backend-xapian.cpp @@ -0,0 +1,13 @@ +Patch to silence a warning, until adopted by upstream (issue #101) + +--- src/fts-backend-xapian.cpp.orig 2022-03-25 21:16:29 UTC ++++ src/fts-backend-xapian.cpp +@@ -750,7 +750,7 @@ struct fts_backend fts_backend_xapian = + { + .name = "xapian", + .flags = FTS_BACKEND_FLAG_BUILD_FULL_WORDS, +- { ++ .v = { + fts_backend_xapian_alloc, + fts_backend_xapian_init, + fts_backend_xapian_deinit, diff --git a/mail/evolution-ews/Makefile b/mail/evolution-ews/Makefile index 0c272f8a93d..a68676d51ee 100644 --- a/mail/evolution-ews/Makefile +++ b/mail/evolution-ews/Makefile @@ -2,6 +2,7 @@ PORTNAME= evolution-ews DISTVERSION= 3.42.4 +PORTREVISION= 1 CATEGORIES= mail gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/mail/evolution/Makefile b/mail/evolution/Makefile index 5a04ba7c8c1..413b98bcaf7 100644 --- a/mail/evolution/Makefile +++ b/mail/evolution/Makefile @@ -2,6 +2,7 @@ PORTNAME= evolution DISTVERSION= 3.42.4 +PORTREVISION= 1 CATEGORIES= mail gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/mail/geary/Makefile b/mail/geary/Makefile index 
edf784e38e6..a1a65e41f04 100644 --- a/mail/geary/Makefile +++ b/mail/geary/Makefile @@ -2,7 +2,7 @@ PORTNAME= geary PORTVERSION= 40.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= mail gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/mail/gnubiff/Makefile b/mail/gnubiff/Makefile index 84e89f632fd..e5a7c2b1cc9 100644 --- a/mail/gnubiff/Makefile +++ b/mail/gnubiff/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnubiff PORTVERSION= 2.2.13 -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= mail MASTER_SITES= SF diff --git a/mail/msmtp/Makefile b/mail/msmtp/Makefile index f24c1ae833d..567c8e0342c 100644 --- a/mail/msmtp/Makefile +++ b/mail/msmtp/Makefile @@ -1,7 +1,7 @@ # Created by: R.I.Pienaar PORTNAME= msmtp -PORTVERSION= 1.8.19 +PORTVERSION= 1.8.20 CATEGORIES= mail MASTER_SITES= https://marlam.de/msmtp/releases/ diff --git a/mail/msmtp/distinfo b/mail/msmtp/distinfo index 616d84442c6..c2fb0f29be3 100644 --- a/mail/msmtp/distinfo +++ b/mail/msmtp/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1636085120 -SHA256 (msmtp-1.8.19.tar.xz) = 34a1e1981176874dbe4ee66ee0d9103c90989aa4dcdc4861e4de05ce7e44526b -SIZE (msmtp-1.8.19.tar.xz) = 383100 +TIMESTAMP = 1648219861 +SHA256 (msmtp-1.8.20.tar.xz) = d93ae2aafc0f48af7dc9d0b394df1bb800588b8b4e8d096d8b3cf225344eb111 +SIZE (msmtp-1.8.20.tar.xz) = 385696 diff --git a/mail/py-Products.SecureMailHost/files/patch-2to3 b/mail/py-Products.SecureMailHost/files/patch-2to3 new file mode 100644 index 00000000000..8f5b763bd1e --- /dev/null +++ b/mail/py-Products.SecureMailHost/files/patch-2to3 @@ -0,0 +1,165 @@ +--- Products/SecureMailHost/SecureMailHost.py.orig 2009-09-05 08:33:50 UTC ++++ Products/SecureMailHost/SecureMailHost.py +@@ -16,7 +16,7 @@ + $Id: SecureMailHost.py 96773 2009-09-05 14:33:50Z hannosch $ + """ + +-from config import BAD_HEADERS ++from .config import BAD_HEADERS + from copy import deepcopy + + import email.Message +@@ -201,10 +201,10 @@ class SecureMailBase(MailBase): + if addr: 
+ result = self.validateEmailAddresses(addr) + if not result: +- raise MailHostError, 'Invalid email address: %s' % addr ++ raise MailHostError('Invalid email address: %s' % addr) + result = self.validateSingleEmailAddress(mfrom) + if not result: +- raise MailHostError, 'Invalid email address: %s' % mfrom ++ raise MailHostError('Invalid email address: %s' % mfrom) + + # create message + if isinstance(message, email.Message.Message): +@@ -212,7 +212,7 @@ class SecureMailBase(MailBase): + # change the message + msg = deepcopy(message) + else: +- if isinstance(message, unicode): ++ if isinstance(message, str): + message = message.encode(charset) + msg = email.MIMEText.MIMEText(message, subtype, charset) + +@@ -228,7 +228,7 @@ class SecureMailBase(MailBase): + + for bad in BAD_HEADERS: + if bad in kwargs: +- raise MailHostError, 'Header %s is forbidden' % bad ++ raise MailHostError('Header %s is forbidden' % bad) + self.setHeaderOf(msg, **kwargs) + + # we have to pass *all* recipient email addresses to the +@@ -251,7 +251,7 @@ class SecureMailBase(MailBase): + + All occurences of the key are deleted first! 
+ """ +- for key, val in kwargs.items(): ++ for key, val in list(kwargs.items()): + del msg[key] # save - email.Message won't raise a KeyError + if skipEmpty and not val: + continue +@@ -293,7 +293,7 @@ class SecureMailBase(MailBase): + # stage 2: get a list of address strings using email.formataddr + addresses = [] + for addr in addr_list: +- if isinstance(addr, basestring): ++ if isinstance(addr, str): + addresses.append(email.Utils.formataddr(('', addr))) + else: + if len(addr) != 2: +@@ -311,7 +311,7 @@ class SecureMailBase(MailBase): + """Lower-level function to validate a single normalized email + address, see validateEmailAddress + """ +- if not isinstance(address, basestring): ++ if not isinstance(address, str): + return False + + sub = EMAIL_CUTOFF_RE.match(address); +@@ -329,7 +329,7 @@ class SecureMailBase(MailBase): + def validateSingleEmailAddress(self, address): + """Validate a single email address, see also validateEmailAddresses + """ +- if not isinstance(address, basestring): ++ if not isinstance(address, str): + return False + + sub = EMAIL_CUTOFF_RE.match(address); +@@ -353,7 +353,7 @@ class SecureMailBase(MailBase): + """Validate a list of possibly several email addresses, see + also validateSingleEmailAddress + """ +- if not isinstance(addresses, basestring): ++ if not isinstance(addresses, str): + return False + + sub = EMAIL_CUTOFF_RE.match(addresses); +--- Products/SecureMailHost/tests/common.py.orig 2004-07-18 15:21:52 UTC ++++ Products/SecureMailHost/tests/common.py +@@ -2,12 +2,6 @@ from Testing import ZopeTestCase + + from Products.SecureMailHost.SecureMailHost import SecureMailBase + +-try: +- True +-except NameError: +- True=1 +- False=0 +- + ZopeTestCase.installProduct('MailHost', quiet=1) + ZopeTestCase.installProduct('PageTemplates', quiet=1) + ZopeTestCase.installProduct('PythonScripts', quiet=1) +--- Products/SecureMailHost/tests/framework.py.orig 2004-05-16 01:36:28 UTC ++++ Products/SecureMailHost/tests/framework.py +@@ -52,7 
+52,7 @@ if __INSTANCE_HOME.endswith(os.sep): + + # Find and import the Testing package + # +-if not sys.modules.has_key('Testing'): ++if 'Testing' not in sys.modules: + p0 = sys.path[0] + if p0 and __name__ == '__main__': + os.chdir(p0) +@@ -66,12 +66,12 @@ if not sys.modules.has_key('Testing'): + break + p, d = s and ('','') or os.path.split(p) + else: +- print 'Unable to locate Testing package.', +- print 'You might need to set SOFTWARE_HOME.' ++ print('Unable to locate Testing package.', end=' ') ++ print('You might need to set SOFTWARE_HOME.') + sys.exit(1) + + import Testing, unittest +-execfile(os.path.join(os.path.dirname(Testing.__file__), 'common.py')) ++exec(compile(open(os.path.join(os.path.dirname(Testing.__file__), 'common.py'), "rb").read(), os.path.join(os.path.dirname(Testing.__file__), 'common.py'), 'exec')) + + # Include ZopeTestCase support + # +@@ -80,8 +80,8 @@ if 1: # Create a new scope + p = os.path.join(os.path.dirname(Testing.__file__), 'ZopeTestCase') + + if not os.path.isdir(p): +- print 'Unable to locate ZopeTestCase package.', +- print 'You might need to install ZopeTestCase.' ++ print('Unable to locate ZopeTestCase package.', end=' ') ++ print('You might need to install ZopeTestCase.') + sys.exit(1) + + ztc_common = 'ztc_common.py' +@@ -89,19 +89,19 @@ if 1: # Create a new scope + + f = 0 + if os.path.exists(ztc_common_global): +- execfile(ztc_common_global) ++ exec(compile(open(ztc_common_global, "rb").read(), ztc_common_global, 'exec')) + f = 1 + if os.path.exists(ztc_common): +- execfile(ztc_common) ++ exec(compile(open(ztc_common, "rb").read(), ztc_common, 'exec')) + f = 1 + + if not f: +- print 'Unable to locate %s.' % ztc_common ++ print('Unable to locate %s.' 
% ztc_common) + sys.exit(1) + + # Debug + # +-print 'SOFTWARE_HOME: %s' % os.environ.get('SOFTWARE_HOME', 'Not set') +-print 'INSTANCE_HOME: %s' % os.environ.get('INSTANCE_HOME', 'Not set') ++print('SOFTWARE_HOME: %s' % os.environ.get('SOFTWARE_HOME', 'Not set')) ++print('INSTANCE_HOME: %s' % os.environ.get('INSTANCE_HOME', 'Not set')) + sys.stdout.flush() + diff --git a/mail/py-pyzmail/files/patch-2to3 b/mail/py-pyzmail/files/patch-2to3 index b9b7db7fde8..bc9901de399 100644 --- a/mail/py-pyzmail/files/patch-2to3 +++ b/mail/py-pyzmail/files/patch-2to3 @@ -469,18 +469,18 @@ --- setup.py.orig 2014-02-23 21:47:08 UTC +++ setup.py -@@ -105,14 +105,6 @@ if 'py2exe' in sys.argv and os.name=='nt': - - data_files.append( (doc_dir, [ 'README.txt', 'Changelog.txt', 'LICENSE.txt']) ) - --# support for python 3.x with "distribute" --if sys.version_info >= (3,): -- # avoid setuptools to report unknown options under python 2.X -- extra_options['use_2to3'] = True -- # extra_options['convert_2to3_doctests'] = ['src/your/module'] -- # extra_options['use_2to3_fixers'] = ['your.fixers' ] -- extra_options['install_requires']=['distribute'], # be sure we are using distribute -- - setup(name='pyzmail', - version=version, - author='Alain Spineux', +@@ -105,14 +105,6 @@ if 'py2exe' in sys.argv and os.name=='nt': + + data_files.append( (doc_dir, [ 'README.txt', 'Changelog.txt', 'LICENSE.txt']) ) + +-# support for python 3.x with "distribute" +-if sys.version_info >= (3,): +- # avoid setuptools to report unknown options under python 2.X +- extra_options['use_2to3'] = True +- # extra_options['convert_2to3_doctests'] = ['src/your/module'] +- # extra_options['use_2to3_fixers'] = ['your.fixers' ] +- extra_options['install_requires']=['distribute'], # be sure we are using distribute +- + setup(name='pyzmail', + version=version, + author='Alain Spineux', diff --git a/mail/rspamd-devel/Makefile b/mail/rspamd-devel/Makefile index bba3941384f..7d630b591d1 100644 --- a/mail/rspamd-devel/Makefile +++ 
b/mail/rspamd-devel/Makefile @@ -1,6 +1,5 @@ PORTNAME= rspamd -PORTVERSION= 3.2.a1.20211214 -PORTREVISION= 1 +PORTVERSION= 3.3.a1.20220326 CATEGORIES= mail PKGNAMESUFFIX= -devel @@ -19,7 +18,7 @@ RUN_DEPENDS= ca_root_nss>=3.*:security/ca_root_nss USES= cmake compiler:c++17-lang cpe gnome perl5 pkgconfig sqlite ssl CPE_VENDOR= ${PORTNAME}_project USE_GITHUB= yes -GH_TAGNAME= 93430bb +GH_TAGNAME= 466de60 USE_GNOME= glib20 USE_LDCONFIG= yes USE_PERL5= build run diff --git a/mail/rspamd-devel/distinfo b/mail/rspamd-devel/distinfo index 57d9bf2e7d6..c43cb4c49a1 100644 --- a/mail/rspamd-devel/distinfo +++ b/mail/rspamd-devel/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1639458536 -SHA256 (rspamd-rspamd-3.2.a1.20211214-93430bb_GH0.tar.gz) = 1c1f672e68bb45dbe741819ed05369364cc0ca8f3661d8dabb4bad86c8f60ada -SIZE (rspamd-rspamd-3.2.a1.20211214-93430bb_GH0.tar.gz) = 5631698 +TIMESTAMP = 1648349709 +SHA256 (rspamd-rspamd-3.3.a1.20220326-466de60_GH0.tar.gz) = 3f4111deda177dbc496e08f6844b115e108119f1b5e6e2deb9657f77962fa109 +SIZE (rspamd-rspamd-3.3.a1.20220326-466de60_GH0.tar.gz) = 5640769 diff --git a/mail/rspamd-devel/pkg-plist b/mail/rspamd-devel/pkg-plist index d29895bbf26..3a3c560ef5d 100644 --- a/mail/rspamd-devel/pkg-plist +++ b/mail/rspamd-devel/pkg-plist @@ -313,6 +313,7 @@ man/man8/rspamd.8.gz %%DATADIR%%/www/css/FooTable.Glyphicons.css %%DATADIR%%/www/css/bootstrap.min.css %%DATADIR%%/www/css/d3evolution.css +%%DATADIR%%/www/css/d3pie.css %%DATADIR%%/www/css/font-glyphicons.css %%DATADIR%%/www/css/footable.standalone.min.css %%DATADIR%%/www/css/nprogress.css diff --git a/mail/rubygem-actionmailbox60/Makefile b/mail/rubygem-actionmailbox60/Makefile index 361530262c2..4ccb93d341c 100644 --- a/mail/rubygem-actionmailbox60/Makefile +++ b/mail/rubygem-actionmailbox60/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= actionmailbox -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= mail rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git 
a/mail/rubygem-actionmailbox60/distinfo b/mail/rubygem-actionmailbox60/distinfo index 0898eda5065..53fdf458eb9 100644 --- a/mail/rubygem-actionmailbox60/distinfo +++ b/mail/rubygem-actionmailbox60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058912 -SHA256 (rubygem/actionmailbox-6.0.4.6.gem) = 5b104fcd7f2fcd4a194bf2a250a4e305b9692f1628e9c9067ebd0990190a8edc -SIZE (rubygem/actionmailbox-6.0.4.6.gem) = 20992 +TIMESTAMP = 1647264918 +SHA256 (rubygem/actionmailbox-6.0.4.7.gem) = 97647dc0698ef60bb9666518d4476b6c4cf19af1256edfcebefdde9d5c7aa272 +SIZE (rubygem/actionmailbox-6.0.4.7.gem) = 20992 diff --git a/mail/rubygem-actionmailbox61/Makefile b/mail/rubygem-actionmailbox61/Makefile index 8fe92bffef2..404f2ba3334 100644 --- a/mail/rubygem-actionmailbox61/Makefile +++ b/mail/rubygem-actionmailbox61/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= actionmailbox -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= mail rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/mail/rubygem-actionmailbox61/distinfo b/mail/rubygem-actionmailbox61/distinfo index 6b80e2c162b..2adf855c914 100644 --- a/mail/rubygem-actionmailbox61/distinfo +++ b/mail/rubygem-actionmailbox61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058938 -SHA256 (rubygem/actionmailbox-6.1.4.6.gem) = 9c4e5d3e9ef13366ece4b3b62bee35561d9e493d87590a468bbf10edc89f1662 -SIZE (rubygem/actionmailbox-6.1.4.6.gem) = 22016 +TIMESTAMP = 1647264944 +SHA256 (rubygem/actionmailbox-6.1.4.7.gem) = adcbe213392c71a66a3f47821efa556e82a3c5f5b89006f282edbdfa29d257cf +SIZE (rubygem/actionmailbox-6.1.4.7.gem) = 22016 diff --git a/mail/rubygem-actionmailbox70/Makefile b/mail/rubygem-actionmailbox70/Makefile index 193098b5ddc..e1788e019ce 100644 --- a/mail/rubygem-actionmailbox70/Makefile +++ b/mail/rubygem-actionmailbox70/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= actionmailbox -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= mail rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git 
a/mail/rubygem-actionmailbox70/distinfo b/mail/rubygem-actionmailbox70/distinfo index ed1f518fb3e..5fe852905ea 100644 --- a/mail/rubygem-actionmailbox70/distinfo +++ b/mail/rubygem-actionmailbox70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058964 -SHA256 (rubygem/actionmailbox-7.0.2.gem) = 2fa7b721372b1a2a32604a7cdf6a2b0696c91ee6da794fec486144eb7e0c5d72 -SIZE (rubygem/actionmailbox-7.0.2.gem) = 22016 +TIMESTAMP = 1647264970 +SHA256 (rubygem/actionmailbox-7.0.2.3.gem) = 33f41d480bf0cefd04de0ef25b424c78df8908d6668339793577997a844fdd67 +SIZE (rubygem/actionmailbox-7.0.2.3.gem) = 22016 diff --git a/mail/rubygem-actionmailer52/Makefile b/mail/rubygem-actionmailer52/Makefile index 55a630ac546..d46edc17d7e 100644 --- a/mail/rubygem-actionmailer52/Makefile +++ b/mail/rubygem-actionmailer52/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= actionmailer -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= mail rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/mail/rubygem-actionmailer52/distinfo b/mail/rubygem-actionmailer52/distinfo index 851ca010558..de405caafc6 100644 --- a/mail/rubygem-actionmailer52/distinfo +++ b/mail/rubygem-actionmailer52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298704 -SHA256 (rubygem/actionmailer-5.2.6.gem) = 8ddac9dd54bd1f78a50682662a964a109daf9c26575e6bd2ed15c39929b19b3f -SIZE (rubygem/actionmailer-5.2.6.gem) = 29184 +TIMESTAMP = 1647264896 +SHA256 (rubygem/actionmailer-5.2.7.gem) = ff622cbf62259d9235b442e1e98ceecebe8f53e9628c2aca0fb515124512c1b7 +SIZE (rubygem/actionmailer-5.2.7.gem) = 29696 diff --git a/mail/rubygem-actionmailer60/Makefile b/mail/rubygem-actionmailer60/Makefile index f214e48b43f..5abf0b0981b 100644 --- a/mail/rubygem-actionmailer60/Makefile +++ b/mail/rubygem-actionmailer60/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= actionmailer -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= mail rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/mail/rubygem-actionmailer60/distinfo 
b/mail/rubygem-actionmailer60/distinfo index 142290d64d8..ef8559c3e30 100644 --- a/mail/rubygem-actionmailer60/distinfo +++ b/mail/rubygem-actionmailer60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058914 -SHA256 (rubygem/actionmailer-6.0.4.6.gem) = b21358232a603f7fd1ab62737819e9cc7f4cffe906383025fb96c466ecc1a315 -SIZE (rubygem/actionmailer-6.0.4.6.gem) = 31232 +TIMESTAMP = 1647264920 +SHA256 (rubygem/actionmailer-6.0.4.7.gem) = d1d36aae32c3e7a44b321cd0bdda6ef8fcffcbfaaa2bf704b59e62ebf18362c7 +SIZE (rubygem/actionmailer-6.0.4.7.gem) = 31232 diff --git a/mail/rubygem-actionmailer61/Makefile b/mail/rubygem-actionmailer61/Makefile index b629d8aead6..0d14f6e0532 100644 --- a/mail/rubygem-actionmailer61/Makefile +++ b/mail/rubygem-actionmailer61/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= actionmailer -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= mail rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/mail/rubygem-actionmailer61/distinfo b/mail/rubygem-actionmailer61/distinfo index db8b67cc7af..39d87e6c639 100644 --- a/mail/rubygem-actionmailer61/distinfo +++ b/mail/rubygem-actionmailer61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058940 -SHA256 (rubygem/actionmailer-6.1.4.6.gem) = 88a375ed5d33352189e1702c05786e036429edc1f0ed346f96c2732476af0c57 -SIZE (rubygem/actionmailer-6.1.4.6.gem) = 30720 +TIMESTAMP = 1647264946 +SHA256 (rubygem/actionmailer-6.1.4.7.gem) = aab30433ba97cc18b6f00e1de434342688e31c341429e13feb0f2f3c7a365566 +SIZE (rubygem/actionmailer-6.1.4.7.gem) = 30720 diff --git a/mail/rubygem-actionmailer70/Makefile b/mail/rubygem-actionmailer70/Makefile index 6953253ba5f..c376e64b6c6 100644 --- a/mail/rubygem-actionmailer70/Makefile +++ b/mail/rubygem-actionmailer70/Makefile @@ -1,5 +1,5 @@ PORTNAME= actionmailer -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= mail rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/mail/rubygem-actionmailer70/distinfo b/mail/rubygem-actionmailer70/distinfo index 
1f5020741f1..15b635a9615 100644 --- a/mail/rubygem-actionmailer70/distinfo +++ b/mail/rubygem-actionmailer70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058966 -SHA256 (rubygem/actionmailer-7.0.2.gem) = 4d3906735d04a77d9d2d3a14bdb147204f3bab0c7bc3ad44d86431883869cb2a -SIZE (rubygem/actionmailer-7.0.2.gem) = 30208 +TIMESTAMP = 1647264972 +SHA256 (rubygem/actionmailer-7.0.2.3.gem) = afd3391216b2c4a69807145d6792966dcbf731f5ad585bea07b37aacd8d15663 +SIZE (rubygem/actionmailer-7.0.2.3.gem) = 30208 diff --git a/mail/rubygem-mail/Makefile b/mail/rubygem-mail/Makefile index 0c4695ae012..51bdd9d2e8f 100644 --- a/mail/rubygem-mail/Makefile +++ b/mail/rubygem-mail/Makefile @@ -2,7 +2,7 @@ PORTNAME= mail PORTVERSION= 2.7.1 -PORTREVISION= 1 +PORTREVISION= 2 PORTEPOCH= 2 CATEGORIES= mail rubygems MASTER_SITES= RG @@ -13,13 +13,22 @@ COMMENT= Ruby email handler LICENSE= MIT LICENSE_FILE= ${WRKSRC}/MIT-LICENSE -RUN_DEPENDS= rubygem-mini_mime>=0.1.1:mail/rubygem-mini_mime \ - rubygem-net-smtp>0:mail/rubygem-net-smtp +RUN_DEPENDS= rubygem-mini_mime>=0.1.1:mail/rubygem-mini_mime USES= cpe gem -CPE_VENDOR= ${PORTNAME}_project USE_RUBY= yes NO_ARCH= yes -.include +CPE_VENDOR= mail_project + +.include + +# See https://github.com/mikel/mail/pull/1439 +.if ${RUBY_VER} >= 3.1 +RUN_DEPENDS+= rubygem-net-imap>=0:mail/rubygem-net-imap \ + rubygem-net-pop>=0:mail/rubygem-net-pop \ + rubygem-net-smtp>=0:mail/rubygem-net-smtp +.endif + +.include diff --git a/mail/rubygem-mail/files/patch-mail.gemspec b/mail/rubygem-mail/files/patch-mail.gemspec deleted file mode 100644 index 5b1682def04..00000000000 --- a/mail/rubygem-mail/files/patch-mail.gemspec +++ /dev/null @@ -1,18 +0,0 @@ ---- mail.gemspec.orig 2021-11-10 18:38:59 UTC -+++ mail.gemspec -@@ -24,6 +24,7 @@ Gem::Specification.new do |s| - end - - if s.respond_to? 
:add_runtime_dependency then -+ s.add_runtime_dependency(%q.freeze, [">= 0"]) - s.add_runtime_dependency(%q.freeze, [">= 0.1.1"]) - s.add_development_dependency(%q.freeze, [">= 1.0.3"]) - s.add_development_dependency(%q.freeze, ["> 0.8.7"]) -@@ -31,6 +32,7 @@ Gem::Specification.new do |s| - s.add_development_dependency(%q.freeze, [">= 0"]) - s.add_development_dependency(%q.freeze, [">= 0"]) - else -+ s.add_dependency(%q.freeze, [">= 0"]) - s.add_dependency(%q.freeze, [">= 0.1.1"]) - s.add_dependency(%q.freeze, [">= 1.0.3"]) - s.add_dependency(%q.freeze, ["> 0.8.7"]) diff --git a/mail/s-nail/Makefile b/mail/s-nail/Makefile index 89d46fa8023..f0bec1fc2f0 100644 --- a/mail/s-nail/Makefile +++ b/mail/s-nail/Makefile @@ -1,5 +1,5 @@ PORTNAME= s-nail -DISTVERSION= 14.9.23 +DISTVERSION= 14.9.24 CATEGORIES= mail MASTER_SITES= https://www.sdaoden.eu/downloads/ diff --git a/mail/s-nail/distinfo b/mail/s-nail/distinfo index 305398ba13f..b157c8d735b 100644 --- a/mail/s-nail/distinfo +++ b/mail/s-nail/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1636695384 -SHA256 (s-nail-14.9.23.tar.xz) = 2c717b22f4cd8719b82b6618640da6031382d2bf8eb51283bca2c6266957bca8 -SIZE (s-nail-14.9.23.tar.xz) = 799572 +TIMESTAMP = 1648319427 +SHA256 (s-nail-14.9.24.tar.xz) = 2714d6b8fb2af3b363fc7c79b76d058753716345d1b6ebcd8870ecd0e4f7ef8c +SIZE (s-nail-14.9.24.tar.xz) = 803100 diff --git a/math/Makefile b/math/Makefile index bb9be988b65..e90fcc060cb 100644 --- a/math/Makefile +++ b/math/Makefile @@ -834,6 +834,7 @@ SUBDIR += py-fastdtw SUBDIR += py-flax SUBDIR += py-fpylll + SUBDIR += py-fraction SUBDIR += py-fvcore SUBDIR += py-gau2grid SUBDIR += py-gimmik @@ -867,10 +868,10 @@ SUBDIR += py-matplotlib2 SUBDIR += py-mip SUBDIR += py-mixsimulator + SUBDIR += py-moarchiving SUBDIR += py-mpmath SUBDIR += py-munkres SUBDIR += py-munkres10 - SUBDIR += py-mutatormath SUBDIR += py-ndindex SUBDIR += py-networkx SUBDIR += py-nevergrad diff --git a/math/R-cran-igraph/Makefile b/math/R-cran-igraph/Makefile index 
5022e72dce0..f752b528375 100644 --- a/math/R-cran-igraph/Makefile +++ b/math/R-cran-igraph/Makefile @@ -2,6 +2,7 @@ PORTNAME= igraph DISTVERSION= 1.2.11 +PORTREVISION= 1 CATEGORIES= math DISTNAME= ${PORTNAME}_${DISTVERSION} diff --git a/math/cadabra2/Makefile b/math/cadabra2/Makefile index 7eb4b49f313..ace6792fab2 100644 --- a/math/cadabra2/Makefile +++ b/math/cadabra2/Makefile @@ -1,5 +1,6 @@ PORTNAME= cadabra2 DISTVERSION= 2.3.8 +PORTREVISION= 1 CATEGORIES= math MAINTAINER= yuri@FreeBSD.org diff --git a/math/coq/Makefile b/math/coq/Makefile index 96177465a17..26a24ed12b2 100644 --- a/math/coq/Makefile +++ b/math/coq/Makefile @@ -1,6 +1,6 @@ PORTNAME= coq PORTVERSION= 8.6 -PORTREVISION= 8 +PORTREVISION= 9 PORTEPOCH= 3 CATEGORIES= math MASTER_SITES= http://coq.inria.fr/distrib/V${PORTVERSION}/files/ \ diff --git a/math/drgeo/Makefile b/math/drgeo/Makefile index b878d14d4da..ad59a803487 100644 --- a/math/drgeo/Makefile +++ b/math/drgeo/Makefile @@ -2,7 +2,7 @@ PORTNAME= drgeo PORTVERSION= 1.1.0 -PORTREVISION= 22 +PORTREVISION= 23 CATEGORIES= math MASTER_SITES= SF/ofset/${PORTNAME}/${PORTVERSION} diff --git a/math/ggobi/Makefile b/math/ggobi/Makefile index de5d950b259..aa6ddc3ac00 100644 --- a/math/ggobi/Makefile +++ b/math/ggobi/Makefile @@ -2,7 +2,7 @@ PORTNAME= ggobi PORTVERSION= 2.1.11 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= math graphics MASTER_SITES= http://www.ggobi.org/downloads/ diff --git a/math/gkmap/Makefile b/math/gkmap/Makefile index 3b598de8316..71605700c83 100644 --- a/math/gkmap/Makefile +++ b/math/gkmap/Makefile @@ -2,7 +2,7 @@ PORTNAME= gkmap PORTVERSION= 0.2 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= math MASTER_SITES= SF/gkmap/gkmap/gkmap-${PORTVERSION}/ diff --git a/math/gnome-calculator/Makefile b/math/gnome-calculator/Makefile index 1d804ec1b85..bb93fdd19fe 100644 --- a/math/gnome-calculator/Makefile +++ b/math/gnome-calculator/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-calculator PORTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= math 
gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/math/gnumeric/Makefile b/math/gnumeric/Makefile index e8d55a76654..af64de9b61d 100644 --- a/math/gnumeric/Makefile +++ b/math/gnumeric/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnumeric PORTVERSION= 1.12.50 +PORTREVISION= 1 CATEGORIES= math gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/math/gretl/Makefile b/math/gretl/Makefile index e3f80f59585..7b0ee6998a3 100644 --- a/math/gretl/Makefile +++ b/math/gretl/Makefile @@ -2,6 +2,7 @@ PORTNAME= gretl PORTVERSION= 2022a +PORTREVISION= 1 CATEGORIES= math finance MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${DISTVERSIONFULL}/ diff --git a/math/igraph/Makefile b/math/igraph/Makefile index 805db0815e5..c005803d64c 100644 --- a/math/igraph/Makefile +++ b/math/igraph/Makefile @@ -2,6 +2,7 @@ PORTNAME= igraph DISTVERSION= 0.9.6 +PORTREVISION= 1 CATEGORIES= math MASTER_SITES= https://github.com/${PORTNAME}/${PORTNAME}/releases/download/${DISTVERSION}/ diff --git a/math/lean/Makefile b/math/lean/Makefile index 67bb222dfe3..40d0c661342 100644 --- a/math/lean/Makefile +++ b/math/lean/Makefile @@ -1,6 +1,6 @@ PORTNAME= lean DISTVERSIONPREFIX= v -DISTVERSION= 3.42.0 +DISTVERSION= 3.42.1 CATEGORIES= math MAINTAINER= yuri@FreeBSD.org diff --git a/math/lean/distinfo b/math/lean/distinfo index 96c20cdb9df..d03b3b3823a 100644 --- a/math/lean/distinfo +++ b/math/lean/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647732048 -SHA256 (leanprover-community-lean-v3.42.0_GH0.tar.gz) = b6f96feb25f55c346daadee4f7277fbd9694d3f3f3507ce8cfd9539a04066680 -SIZE (leanprover-community-lean-v3.42.0_GH0.tar.gz) = 1908134 +TIMESTAMP = 1648348914 +SHA256 (leanprover-community-lean-v3.42.1_GH0.tar.gz) = 5b8cbfdea6cf4de5488467297958876aa0b3a79ed5806f7d0f01a0c396beb4e2 +SIZE (leanprover-community-lean-v3.42.1_GH0.tar.gz) = 1908398 diff --git a/math/libqalculate/Makefile b/math/libqalculate/Makefile index 679409e39a2..bc09a13c1b0 100644 --- 
a/math/libqalculate/Makefile +++ b/math/libqalculate/Makefile @@ -2,6 +2,7 @@ PORTNAME= libqalculate PORTVERSION= 4.1.0 +PORTREVISION= 1 CATEGORIES= math MASTER_SITES= https://github.com/Qalculate/${PORTNAME}/releases/download/v${PORTVERSION}/ diff --git a/math/mate-calc/Makefile b/math/mate-calc/Makefile index 7478d55fe61..11283f97496 100644 --- a/math/mate-calc/Makefile +++ b/math/mate-calc/Makefile @@ -2,6 +2,7 @@ PORTNAME= mate-calc PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= math mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/math/mdal/Makefile b/math/mdal/Makefile index 1b280305dff..cd1aa498b4c 100644 --- a/math/mdal/Makefile +++ b/math/mdal/Makefile @@ -2,7 +2,7 @@ PORTNAME= mdal DISTVERSION= 0.9.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= math geography MAINTAINER= wen@FreeBSD.org diff --git a/math/octave-forge-image/Makefile b/math/octave-forge-image/Makefile index c37f8faf0ca..db7af87d8c9 100644 --- a/math/octave-forge-image/Makefile +++ b/math/octave-forge-image/Makefile @@ -1,8 +1,7 @@ # Created by: Stephen Montgomery-Smith PORTNAME= octave-forge-image -PORTVERSION= 2.12.0 -PORTREVISION= 2 +PORTVERSION= 2.14.0 CATEGORIES= math MAINTAINER= stephen@FreeBSD.org diff --git a/math/octave-forge-image/distinfo b/math/octave-forge-image/distinfo index 0854d0ba37d..924cd48d6c5 100644 --- a/math/octave-forge-image/distinfo +++ b/math/octave-forge-image/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1580697746 -SHA256 (octave-forge/image-2.12.0.tar.gz) = e7d58ced612bc6420d99bb06313250694e5f1d8dcc093293604c253c17c473b4 -SIZE (octave-forge/image-2.12.0.tar.gz) = 440633 +TIMESTAMP = 1648337785 +SHA256 (octave-forge/image-2.14.0.tar.gz) = 7515ea211a8cb8ef5d9d3bab85a36e9df5475e8b05a919a078e0d52746077133 +SIZE (octave-forge/image-2.14.0.tar.gz) = 467778 diff --git a/math/octave/Makefile b/math/octave/Makefile index 27c68963fe7..f7b7962db21 100644 --- a/math/octave/Makefile +++ b/math/octave/Makefile @@ -2,7 +2,7 @@ PORTNAME= octave PORTVERSION= ${OCTAVE_VERSION} 
-PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= math MASTER_SITES= GNU diff --git a/math/openturns/Makefile b/math/openturns/Makefile index 63d923c1730..9ee773a77e4 100644 --- a/math/openturns/Makefile +++ b/math/openturns/Makefile @@ -1,7 +1,7 @@ PORTNAME= openturns DISTVERSIONPREFIX= v DISTVERSION= 1.18 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= math MAINTAINER= yuri@FreeBSD.org diff --git a/math/p5-Math-GMPf/Makefile b/math/p5-Math-GMPf/Makefile index 98b5a26b6c3..eace7899f13 100644 --- a/math/p5-Math-GMPf/Makefile +++ b/math/p5-Math-GMPf/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= Math-GMPf -PORTVERSION= 0.46 +PORTVERSION= 0.47 CATEGORIES= math perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/math/p5-Math-GMPf/distinfo b/math/p5-Math-GMPf/distinfo index c9ccd2c6f8a..813c5b49b38 100644 --- a/math/p5-Math-GMPf/distinfo +++ b/math/p5-Math-GMPf/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057910 -SHA256 (Math-GMPf-0.46.tar.gz) = 00f1a62951396668a4f89e190b357b0dfeac64277ab3e96dff0389cf8751f739 -SIZE (Math-GMPf-0.46.tar.gz) = 59260 +TIMESTAMP = 1647264480 +SHA256 (Math-GMPf-0.47.tar.gz) = 4476b657c0a0fce0fc9f36190711c8a7c00961aa84ceb73ddb8c86457f896551 +SIZE (Math-GMPf-0.47.tar.gz) = 61317 diff --git a/math/p5-Math-GMPq/Makefile b/math/p5-Math-GMPq/Makefile index 82b79dedcde..c9a784e9a16 100644 --- a/math/p5-Math-GMPq/Makefile +++ b/math/p5-Math-GMPq/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= Math-GMPq -PORTVERSION= 0.49 +PORTVERSION= 0.51 CATEGORIES= math perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/math/p5-Math-GMPq/distinfo b/math/p5-Math-GMPq/distinfo index 6e5f1cf53fb..7372f675cde 100644 --- a/math/p5-Math-GMPq/distinfo +++ b/math/p5-Math-GMPq/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057912 -SHA256 (Math-GMPq-0.49.tar.gz) = 6c1d7092d65f4afcb71ee17eb3a59ed03fc36446fb33dc31ac1f7a0686df0291 -SIZE (Math-GMPq-0.49.tar.gz) = 54117 +TIMESTAMP = 1647264482 +SHA256 (Math-GMPq-0.51.tar.gz) = 
4ef7af29ffe63508642142be95336037bc4022fbac73ce98281e08f649c216ec +SIZE (Math-GMPq-0.51.tar.gz) = 56161 diff --git a/math/p5-Math-GMPz/Makefile b/math/p5-Math-GMPz/Makefile index 74bbc786fd2..68985322b49 100644 --- a/math/p5-Math-GMPz/Makefile +++ b/math/p5-Math-GMPz/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= Math-GMPz -PORTVERSION= 0.53 +PORTVERSION= 0.54 CATEGORIES= math perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/math/p5-Math-GMPz/distinfo b/math/p5-Math-GMPz/distinfo index 22f5991512e..2f2501f6acb 100644 --- a/math/p5-Math-GMPz/distinfo +++ b/math/p5-Math-GMPz/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057914 -SHA256 (Math-GMPz-0.53.tar.gz) = 1a649aebfefd2c486d352d7f8f52f7623a6ac70749708aa807ef701f04161015 -SIZE (Math-GMPz-0.53.tar.gz) = 96590 +TIMESTAMP = 1647264484 +SHA256 (Math-GMPz-0.54.tar.gz) = 13b386973f736b7871b97336015150671ff5efa9c1f0b0529db5b38070c0598c +SIZE (Math-GMPz-0.54.tar.gz) = 98822 diff --git a/math/pdal/Makefile b/math/pdal/Makefile index 66e15f3be36..f0edfb44dda 100644 --- a/math/pdal/Makefile +++ b/math/pdal/Makefile @@ -2,7 +2,7 @@ PORTNAME= pdal DISTVERSION= 2.3.0 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= math databases graphics MASTER_SITES= https://github.com/PDAL/PDAL/releases/download/${DISTVERSION}/ DISTNAME= ${PORTNAME}-${DISTVERSION}-src diff --git a/math/polymake/Makefile b/math/polymake/Makefile index 895757cc7d3..e8a02945b01 100644 --- a/math/polymake/Makefile +++ b/math/polymake/Makefile @@ -2,7 +2,7 @@ PORTNAME= polymake DISTVERSION= 4.6 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= math MASTER_SITES= https://polymake.org/lib/exe/fetch.php/download/ DISTNAME= ${PORTNAME}-${DISTVERSION}-minimal diff --git a/math/pspp/Makefile b/math/pspp/Makefile index 0209cacabb0..848a89b3409 100644 --- a/math/pspp/Makefile +++ b/math/pspp/Makefile @@ -2,7 +2,7 @@ PORTNAME= pspp PORTVERSION= 1.4.1 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= math MASTER_SITES= GNU diff --git a/math/py-cma/Makefile 
b/math/py-cma/Makefile index 375459e5016..f26faf78266 100644 --- a/math/py-cma/Makefile +++ b/math/py-cma/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= cma -PORTVERSION= 3.1.0 +PORTVERSION= 3.2.1 CATEGORIES= math python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -19,9 +19,11 @@ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes -OPTIONS_DEFINE= PLOTTING +OPTIONS_DEFINE= CONSTRAINED_SOLUTION_TRACKING PLOTTING +CONSTRAINED_SOLUTION_TRACKING_DESC= Convergence tracking support PLOTTING_DESC= Plotting support +CONSTRAINED_SOLUTION_TRACKING_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}moarchiving>=0:math/py-moarchiving@${PY_FLAVOR} PLOTTING_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}matplotlib>=0:math/py-matplotlib@${PY_FLAVOR} .include diff --git a/math/py-cma/distinfo b/math/py-cma/distinfo index ac9c95af751..fe1082f274c 100644 --- a/math/py-cma/distinfo +++ b/math/py-cma/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1624189695 -SHA256 (cma-3.1.0.tar.gz) = 3b83077fe3a52be1d8c82d61a22f600401c772b1add6ed5dd8152e1a2d15e3c6 -SIZE (cma-3.1.0.tar.gz) = 231877 +TIMESTAMP = 1647264602 +SHA256 (cma-3.2.1.tar.gz) = 88d63a67ed2608d212a4db5a0d1046cbf0da093e17a6d8be024081c9f330d16b +SIZE (cma-3.2.1.tar.gz) = 239340 diff --git a/math/py-fastcluster/Makefile b/math/py-fastcluster/Makefile index 42c88460389..c2d87dd228e 100644 --- a/math/py-fastcluster/Makefile +++ b/math/py-fastcluster/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= fastcluster -PORTVERSION= 1.2.4 +PORTVERSION= 1.2.6 CATEGORIES= math python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/math/py-fastcluster/distinfo b/math/py-fastcluster/distinfo index b543dc2fe3b..04df5124a3c 100644 --- a/math/py-fastcluster/distinfo +++ b/math/py-fastcluster/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632037188 -SHA256 (fastcluster-1.2.4.tar.gz) = b5697a26b5397004bba4ac6308e9e9b7a832dcccfcc0333554bc3898a55601a3 -SIZE (fastcluster-1.2.4.tar.gz) = 173782 +TIMESTAMP = 
1647264604 +SHA256 (fastcluster-1.2.6.tar.gz) = aab886efa7b6bba7ac124f4498153d053e5a08b822d2254926b7206cdf5a8aa6 +SIZE (fastcluster-1.2.6.tar.gz) = 173773 diff --git a/math/py-fraction/Makefile b/math/py-fraction/Makefile new file mode 100644 index 00000000000..b27fa51eed1 --- /dev/null +++ b/math/py-fraction/Makefile @@ -0,0 +1,21 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= fraction +PORTVERSION= 2.1.1 +CATEGORIES= math python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +DISTNAME= Fraction-${PORTVERSION} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Carry out all the fraction operations + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE + +USES= python:3.7+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +.include diff --git a/math/py-fraction/distinfo b/math/py-fraction/distinfo new file mode 100644 index 00000000000..cb164375817 --- /dev/null +++ b/math/py-fraction/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264406 +SHA256 (Fraction-2.1.1.tar.gz) = 75a27c61c68f357dbb8ea395eab932b3a16a3aa2b6aa44a052bcef319724f882 +SIZE (Fraction-2.1.1.tar.gz) = 4568 diff --git a/math/py-fraction/pkg-descr b/math/py-fraction/pkg-descr new file mode 100644 index 00000000000..f7cc9ec4e0e --- /dev/null +++ b/math/py-fraction/pkg-descr @@ -0,0 +1,4 @@ +fraction is a Python module that is designed for fractional values +numerator/denominator. 
+ +WWW: https://github.com/bradley101/fraction diff --git a/math/py-gym-notices/Makefile b/math/py-gym-notices/Makefile index 32eacaa2c60..de21dd7acee 100644 --- a/math/py-gym-notices/Makefile +++ b/math/py-gym-notices/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= gym-notices -PORTVERSION= 0.0.4 +PORTVERSION= 0.0.6 CATEGORIES= math python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/math/py-gym-notices/distinfo b/math/py-gym-notices/distinfo index e688c676a5c..c1304fc399d 100644 --- a/math/py-gym-notices/distinfo +++ b/math/py-gym-notices/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646057854 -SHA256 (gym-notices-0.0.4.tar.gz) = c490570fa91acafe25768f9877cc0dfe0700d68fb25dc9dfa84512e4bcf01805 -SIZE (gym-notices-0.0.4.tar.gz) = 2531 +TIMESTAMP = 1647264608 +SHA256 (gym-notices-0.0.6.tar.gz) = 1f12b828cf7ddd6b7be1ca6bccab9a136c8217ad1413080b97248c3ff76663b2 +SIZE (gym-notices-0.0.6.tar.gz) = 2512 diff --git a/math/py-gym/Makefile b/math/py-gym/Makefile index 5b1c42209f8..725875074d4 100644 --- a/math/py-gym/Makefile +++ b/math/py-gym/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= gym -PORTVERSION= 0.22.0 +PORTVERSION= 0.23.0 CATEGORIES= math python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/math/py-gym/distinfo b/math/py-gym/distinfo index 3dfb91d6d5e..d9d6b16cb6d 100644 --- a/math/py-gym/distinfo +++ b/math/py-gym/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058066 -SHA256 (gym-0.22.0.tar.gz) = 339144c89951758187c378111919bc0e2f1695f9e9d9699e3f19279a6398148d -SIZE (gym-0.22.0.tar.gz) = 631075 +TIMESTAMP = 1647264606 +SHA256 (gym-0.23.0.tar.gz) = dbd3d0c50fc1260b57e6f12ba792152b73551730512623b7653d6dfb2f7a105d +SIZE (gym-0.23.0.tar.gz) = 624422 diff --git a/math/py-gym/pkg-descr b/math/py-gym/pkg-descr index 291faba27a4..e260bc2e6a0 100644 --- a/math/py-gym/pkg-descr +++ b/math/py-gym/pkg-descr @@ -20,5 +20,5 @@ methods you should know: - render(self, mode='human'): Render one 
frame of the environment. The default mode will do something human friendly, such as pop up a window. -WWW: https://gym.openai.com/ +WWW: https://www.gymlibrary.ml/ WWW: https://github.com/openai/gym diff --git a/math/py-iohexperimenter/Makefile b/math/py-iohexperimenter/Makefile index dfd1e1b00ae..b919729ac6f 100644 --- a/math/py-iohexperimenter/Makefile +++ b/math/py-iohexperimenter/Makefile @@ -2,6 +2,7 @@ PORTNAME= iohexperimenter PORTVERSION= 0.2.9.2 +PORTREVISION= 1 CATEGORIES= math python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/math/py-iohexperimenter/files/patch-setup.py b/math/py-iohexperimenter/files/patch-setup.py new file mode 100644 index 00000000000..f888102c625 --- /dev/null +++ b/math/py-iohexperimenter/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2021-01-29 16:13:26 UTC ++++ setup.py +@@ -43,7 +43,7 @@ setup( + version=__version__, + author="Furong Ye, Diederick Vermetten, and Hao Wang", + author_email="f.ye@liacs.leidenuniv.nl", +- packages=find_packages(), ++ packages=find_packages(exclude=['tests*']), + description="The experimenter for Iterative Optimization Heuristic", + long_description=long_description, + long_description_content_type="text/markdown", diff --git a/math/py-moarchiving/Makefile b/math/py-moarchiving/Makefile new file mode 100644 index 00000000000..e81d13dbd0b --- /dev/null +++ b/math/py-moarchiving/Makefile @@ -0,0 +1,24 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= moarchiving +PORTVERSION= 0.6.0 +CATEGORIES= math python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Biobjective Archive class with hypervolume indicator and uncrowded hypervolume improvement computation + +LICENSE= BSD3CLAUSE + +USES= python:3.7+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +OPTIONS_DEFINE= ARBITRARY_PRECISION +ARBITRARY_PRECISION_DESC= Arbitrary precision hypervolume computation support + 
+ARBITRARY_PRECISION_RUN_DEPENDS=${PYTHON_PKGNAMEPREFIX}fraction>=0:math/py-fraction@${PY_FLAVOR} + +.include diff --git a/math/py-moarchiving/distinfo b/math/py-moarchiving/distinfo new file mode 100644 index 00000000000..f71517d957e --- /dev/null +++ b/math/py-moarchiving/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264408 +SHA256 (moarchiving-0.6.0.tar.gz) = 705ded992d399bc1ac703e68391bded6f64e1bde81b2bb25061eaa6208b5b29a +SIZE (moarchiving-0.6.0.tar.gz) = 14635 diff --git a/math/py-moarchiving/pkg-descr b/math/py-moarchiving/pkg-descr new file mode 100644 index 00000000000..9f1b15ea66b --- /dev/null +++ b/math/py-moarchiving/pkg-descr @@ -0,0 +1,7 @@ +The Python class moarchiving.BiobjectiveNondominatedSortedList implements a +bi-objective non-dominated archive with list as parent class. It is heavily +based on the bisect module. It provides easy and fast access to the overall +hypervolume, the contributing hypervolume of each element, and to the uncrowded +hypervolume improvement of any given point in objective space. 
+ +WWW: https://github.com/CMA-ES/moarchiving diff --git a/math/py-mutatormath/Makefile b/math/py-mutatormath/Makefile deleted file mode 100644 index 583d99bf5ef..00000000000 --- a/math/py-mutatormath/Makefile +++ /dev/null @@ -1,25 +0,0 @@ -# Created by: Po-Chuan Hsieh - -PORTNAME= mutatormath -PORTVERSION= 3.0.1 -CATEGORIES= math python -MASTER_SITES= CHEESESHOP -PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} -DISTNAME= MutatorMath-${PORTVERSION} - -MAINTAINER= sunpoet@FreeBSD.org -COMMENT= Python for piecewise linear interpolation in multiple dimensions with multiple, arbitrarily placed, masters - -LICENSE= BSD3CLAUSE -LICENSE_FILE= ${WRKSRC}/LICENSE - -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}defcon>=0.3.5:x11-fonts/py-defcon@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}fontMath>=0.4.8:x11-fonts/py-fontMath@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}fonttools>=3.32.0:print/py-fonttools@${PY_FLAVOR} - -USES= python:3.7+ zip -USE_PYTHON= autoplist concurrent distutils - -NO_ARCH= yes - -.include diff --git a/math/py-mutatormath/distinfo b/math/py-mutatormath/distinfo deleted file mode 100644 index 1e8a31176cb..00000000000 --- a/math/py-mutatormath/distinfo +++ /dev/null @@ -1,3 +0,0 @@ -TIMESTAMP = 1646057856 -SHA256 (MutatorMath-3.0.1.zip) = 8127c1ffad1646f11a953752296c43f5d88cbd5293fff03f093d916b0bc13864 -SIZE (MutatorMath-3.0.1.zip) = 421722 diff --git a/math/py-mutatormath/pkg-descr b/math/py-mutatormath/pkg-descr deleted file mode 100644 index 8c66052359c..00000000000 --- a/math/py-mutatormath/pkg-descr +++ /dev/null @@ -1,8 +0,0 @@ -MutatorMath is a Python library for the calculation of piecewise linear -interpolations in n-dimensions with any number of masters. It was developed for -interpolating data related to fonts, but if can handle any arithmetic object. -- The objects/ subpackage contains the general calculation tools. -- The ufo/ subpackage contains tools to specifically process UFO data. -- MutatorMath has no user interface, just the math. 
- -WWW: https://github.com/LettError/MutatorMath diff --git a/math/py-pdal/Makefile b/math/py-pdal/Makefile index 19fe41f8e92..af7da466456 100644 --- a/math/py-pdal/Makefile +++ b/math/py-pdal/Makefile @@ -24,6 +24,8 @@ USE_PYTHON= autoplist cython distutils post-install: ${STRIP_CMD} ${STAGEDIR}${PYTHONPREFIX_SITELIBDIR}/${PORTNAME}/libpdalpython*.so + ${PYTHON_CMD} -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} + ${PYTHON_CMD} -O -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} do-test: @cd ${WRKSRC} && py.test -v test diff --git a/math/py-pdal/files/patch-2to3 b/math/py-pdal/files/patch-2to3 new file mode 100644 index 00000000000..055f6a9a324 --- /dev/null +++ b/math/py-pdal/files/patch-2to3 @@ -0,0 +1,27 @@ +--- pdal/pipeline.py.orig 2021-11-24 10:30:42 UTC ++++ pdal/pipeline.py +@@ -1,5 +1,5 @@ +-from __future__ import annotations + ++ + import json + import logging + from typing import Any, Container, Dict, Iterator, List, Optional, Sequence, Union, cast +@@ -19,7 +19,7 @@ LogLevelToPDAL = { + logging.INFO: 2, + logging.DEBUG: 8, # pdal::LogLevel::Debug5 + } +-LogLevelFromPDAL = {v: k for k, v in LogLevelToPDAL.items()} ++LogLevelFromPDAL = {v: k for k, v in list(LogLevelToPDAL.items())} + + + class Pipeline(libpdalpython.Pipeline): +@@ -106,7 +106,7 @@ class Pipeline(libpdalpython.Pipeline): + if all(isinstance(stage, Reader) for stage in stages): + stages = [*stages, Filter.merge()] + for stage in stages: +- stage2tag[stage] = stage.tag or _generate_tag(stage, stage2tag.values()) ++ stage2tag[stage] = stage.tag or _generate_tag(stage, list(stage2tag.values())) + options = stage.options + options["tag"] = stage2tag[stage] + inputs = _get_input_tags(stage, stage2tag) diff --git a/math/py-pyFFTW/Makefile b/math/py-pyFFTW/Makefile index 3231591978d..7f6d91f955a 100644 --- a/math/py-pyFFTW/Makefile +++ b/math/py-pyFFTW/Makefile @@ -18,7 +18,7 @@ LIB_DEPENDS= libfftw3.so:math/fftw3 \ libfftw3f.so:math/fftw3-float 
RUN_DEPENDS= ${PY_DEPENDS} -USES= python:3.6+ localbase:ldflags +USES= python:3.8+ localbase:ldflags USE_PYTHON= distutils cython autoplist .include diff --git a/math/py-pybloom/files/patch-2to3 b/math/py-pybloom/files/patch-2to3 new file mode 100644 index 00000000000..ccba959e211 --- /dev/null +++ b/math/py-pybloom/files/patch-2to3 @@ -0,0 +1,60 @@ +--- pybloom_live/benchmarks.py.orig 2018-07-22 09:08:15 UTC ++++ pybloom_live/benchmarks.py +@@ -2,9 +2,9 @@ + # + """Test performance of BloomFilter at a set capacity and error rate.""" + import sys +-from pybloom import BloomFilter ++from .pybloom import BloomFilter + import bitarray, math, time +-from utils import range_fn ++from .utils import range_fn + + + def main(capacity=100000, request_error_rate=0.1): +@@ -14,18 +14,18 @@ def main(capacity=100000, request_error_rate=0.1): + for i in range_fn(0, f.capacity): + f.add(i, skip_check=True) + end = time.time() +- print("{:5.3f} seconds to add to capacity, {:10.2f} entries/second".format( +- end - start, f.capacity / (end - start))) ++ print(("{:5.3f} seconds to add to capacity, {:10.2f} entries/second".format( ++ end - start, f.capacity / (end - start)))) + oneBits = f.bitarray.count(True) + zeroBits = f.bitarray.count(False) +- print "Number of 1 bits:", oneBits +- print "Number of 0 bits:", zeroBits +- print("Number of Filter Bits:", f.num_bits) +- print("Number of slices:", f.num_slices) +- print("Bits per slice:", f.bits_per_slice) ++ print("Number of 1 bits:", oneBits) ++ print("Number of 0 bits:", zeroBits) ++ print(("Number of Filter Bits:", f.num_bits)) ++ print(("Number of slices:", f.num_slices)) ++ print(("Bits per slice:", f.bits_per_slice)) + print("------") +- print("Fraction of 1 bits at capacity: {:5.3f}".format( +- oneBits / float(f.num_bits))) ++ print(("Fraction of 1 bits at capacity: {:5.3f}".format( ++ oneBits / float(f.num_bits)))) + # Look for false positives and measure the actual fp rate + trials = f.capacity + fp = 0 +@@ -36,14 +36,14 @@ 
def main(capacity=100000, request_error_rate=0.1): + end = time.time() + print(("{:5.3f} seconds to check false positives, " + "{:10.2f} checks/second".format(end - start, trials / (end - start)))) +- print("Requested FP rate: {:2.4f}".format(request_error_rate)) +- print("Experimental false positive rate: {:2.4f}".format(fp / float(trials))) ++ print(("Requested FP rate: {:2.4f}".format(request_error_rate))) ++ print(("Experimental false positive rate: {:2.4f}".format(fp / float(trials)))) + # Compute theoretical fp max (Goel/Gupta) + k = f.num_slices + m = f.num_bits + n = f.capacity + fp_theory = math.pow((1 - math.exp(-k * (n + 0.5) / (m - 1))), k) +- print("Projected FP rate (Goel/Gupta): {:2.6f}".format(fp_theory)) ++ print(("Projected FP rate (Goel/Gupta): {:2.6f}".format(fp_theory))) + + if __name__ == '__main__': + main() diff --git a/math/py-python-igraph/Makefile b/math/py-python-igraph/Makefile index b894469462d..1816f86d9a3 100644 --- a/math/py-python-igraph/Makefile +++ b/math/py-python-igraph/Makefile @@ -2,6 +2,7 @@ PORTNAME= python-igraph DISTVERSION= 0.9.9 +PORTREVISION= 1 CATEGORIES= math python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/math/py-svgmath/Makefile b/math/py-svgmath/Makefile index 12fdf68913a..fe7cfa1de64 100644 --- a/math/py-svgmath/Makefile +++ b/math/py-svgmath/Makefile @@ -10,7 +10,7 @@ DISTNAME= SVGMath-${PORTVERSION} MAINTAINER= ports@FreeBSD.org COMMENT= MathML to SVG Converter in Python -USES= python:3.6+ +USES= dos2unix python:3.6+ USE_PYTHON= distutils autoplist .include diff --git a/math/py-svgmath/files/patch-2to3 b/math/py-svgmath/files/patch-2to3 new file mode 100644 index 00000000000..3dc7555e4f8 --- /dev/null +++ b/math/py-svgmath/files/patch-2to3 @@ -0,0 +1,480 @@ +--- svgmath/fonts/afm.py.orig 2022-03-18 18:47:45 UTC ++++ svgmath/fonts/afm.py +@@ -1,5 +1,5 @@ + import sys, glyphlist +-from metric import FontMetric, CharMetric, FontFormatError ++from .metric import FontMetric, 
CharMetric, FontFormatError + + def parseLength(s): return 0.001 * float(s) + +@@ -23,7 +23,7 @@ class AFMMetric (FontMetric): + def readFontMetrics(self, afmfile): + line = afmfile.readline() + if not line.startswith("StartFontMetrics"): +- raise AFMFormatError, "File is not an AFM file" ++ raise AFMFormatError("File is not an AFM file") + # TODO: AFM version control + + while True: +@@ -49,7 +49,7 @@ class AFMMetric (FontMetric): + elif tokens[0] == "Weight": + self.weight = tokens[1].strip() + elif tokens[0] == "FontBBox": +- self.bbox = map (parseLength, tokens[1].split()) ++ self.bbox = list(map (parseLength, tokens[1].split())) + elif tokens[0] == "CapHeight": + self.capheight = parseLength(tokens[1]) + elif tokens[0] == "XHeight": +@@ -88,7 +88,7 @@ class AFMMetric (FontMetric): + if d[0] == "W" or d[0] == "WX" or d[0] == "W0X": + width = parseLength(d[1]) + elif d[0] == "B" and len(d) == 5: +- bbox = map (parseLength, d[1:]) ++ bbox = list(map (parseLength, d[1:])) + elif d[0] == "N": + glyphname = d[1] + +@@ -137,6 +137,6 @@ def main(): + if len (sys.argv) == 2: + AFMMetric(sys.argv[1], log=sys.stderr).dump() + else: +- print """Usage: AFM.py """ ++ print("""Usage: AFM.py """) + + if __name__ == "__main__": main() +--- svgmath/fonts/glyphlist.py.orig 2022-03-18 18:47:45 UTC ++++ svgmath/fonts/glyphlist.py +@@ -15,13 +15,13 @@ class GlyphList(dict): + if len (codelist) != 1: continue # no support for compounds + codepoint = int (codelist[0], 16) + +- if glyph in self.keys(): ++ if glyph in list(self.keys()): + self[glyph].append(codepoint) + else: + self[glyph] = [codepoint] + + def lookup(self, glyphname): +- if glyphname in self.keys(): return self.get(glyphname) ++ if glyphname in list(self.keys()): return self.get(glyphname) + else: return defaultGlyphList.get(glyphname) + + +@@ -34,8 +34,8 @@ def main(): + else: + glyphList = defaultGlyphList + +- for entry, value in glyphList.items(): +- print entry, " => ", value ++ for entry, value in 
list(glyphList.items()): ++ print(entry, " => ", value) + + if __name__ == "__main__": + main() +--- svgmath/fonts/metric.py.orig 2022-03-18 18:47:45 UTC ++++ svgmath/fonts/metric.py +@@ -37,28 +37,28 @@ class FontMetric: + def postParse(self): + # Get Ascender from the 'd' glyph + if self.ascender is None: +- cm = self.chardata.get(ord(u'd')) ++ cm = self.chardata.get(ord('d')) + if cm is not None: + self.descender = cm.bbox[3] + else: self.ascender = 0.7 + + # Get Descender from the 'p' glyph + if self.descender is None: +- cm = self.chardata.get(ord(u'p')) ++ cm = self.chardata.get(ord('p')) + if cm is not None: + self.descender = cm.bbox[1] + else: self.descender = -0.2 + + # Get CapHeight from the 'H' glyph + if self.capheight is None: +- cm = self.chardata.get(ord(u'H')) ++ cm = self.chardata.get(ord('H')) + if cm is not None: + self.capheight = cm.bbox[3] + else: self.capheight = self.ascender + + # Get XHeight from the 'H' glyph + if self.xheight is None: +- cm = self.chardata.get(ord(u'x')) ++ cm = self.chardata.get(ord('x')) + if cm is not None: + self.xheight = cm.bbox[3] + else: self.xheight = 0.45 +@@ -69,7 +69,7 @@ class FontMetric: + # "equal", "minus", "plus", "less", "greater", "periodcentered") + # Default is CapHeight / 2, or 0.3 if there's no CapHeight. 
+ if self.axisposition is None: +- for ch in [ord(u'+'), 0x2212, ord(u'='), ord(u'<'), ord(u'>'), 0xB7]: ++ for ch in [ord('+'), 0x2212, ord('='), ord('<'), ord('>'), 0xB7]: + cm = self.chardata.get(ch) + if cm is not None: + self.axisposition = (cm.bbox[1] + cm.bbox[3]) / 2 +@@ -80,7 +80,7 @@ class FontMetric: + if self.underlinethickness is not None: + self.rulewidth = self.underlinethickness + else: +- for ch in [0x2013, 0x2014, 0x2015, 0x2212, ord(u'-')]: ++ for ch in [0x2013, 0x2014, 0x2015, 0x2212, ord('-')]: + cm = self.chardata.get(ch) + if cm is not None: + self.rulewidth = cm.bbox[3] - cm.bbox[1] +@@ -107,35 +107,35 @@ class FontMetric: + else: self.vgap = self.rulewidth * 2 + + # Set missing glyph to be a space +- self.missingGlyph = self.chardata.get(ord(u' ')) or self.chardata.get(0xA0) ++ self.missingGlyph = self.chardata.get(ord(' ')) or self.chardata.get(0xA0) + + def dump(self): +- print "FontName: ", self.fontname +- print "FullName: ", self.fullname +- print "FontFamily: ", self.family +- print "Weight: ", self.weight +- print "FontBBox: ", ++ print("FontName: ", self.fontname) ++ print("FullName: ", self.fullname) ++ print("FontFamily: ", self.family) ++ print("Weight: ", self.weight) ++ print("FontBBox: ", end=' ') + for x in self.bbox: +- print x, +- print +- print "CapHeight: ", self.capheight +- print "XHeight: ", self.xheight +- print "Ascender: ", self.ascender +- print "Descender: ", self.descender +- print "StdHW: ", self.stdhw +- print "StdVW: ", self.stdvw +- print "UnderlinePosition: ", self.underlineposition +- print "UnderlineThickness: ", self.underlinethickness +- print "ItalicAngle: ", self.italicangle +- print "CharWidth: ", self.charwidth +- print "MathematicalBaseline: ", self.axisposition +- print "Character data: " +- chars = self.chardata.items() ++ print(x, end=' ') ++ print() ++ print("CapHeight: ", self.capheight) ++ print("XHeight: ", self.xheight) ++ print("Ascender: ", self.ascender) ++ print("Descender: ", 
self.descender) ++ print("StdHW: ", self.stdhw) ++ print("StdVW: ", self.stdvw) ++ print("UnderlinePosition: ", self.underlineposition) ++ print("UnderlineThickness: ", self.underlinethickness) ++ print("ItalicAngle: ", self.italicangle) ++ print("CharWidth: ", self.charwidth) ++ print("MathematicalBaseline: ", self.axisposition) ++ print("Character data: ") ++ chars = list(self.chardata.items()) + chars.sort(key = lambda c: c[0]) + for i, cm in chars: + if cm is None: continue +- print " ", ("U+%04X" % i), cm.name+":", " W", cm.width, " B", ++ print(" ", ("U+%04X" % i), cm.name+":", " W", cm.width, " B", end=' ') + for x in cm.bbox: +- print x, +- print ++ print(x, end=' ') ++ print() + +\ No newline at end of file +--- svgmath/fonts/ttf.py.orig 2022-03-18 18:47:45 UTC ++++ svgmath/fonts/ttf.py +@@ -1,5 +1,5 @@ + import sys +-from metric import FontMetric, CharMetric, FontFormatError ++from .metric import FontMetric, CharMetric, FontFormatError + + def readUnsigned(ff, size): + res = 0; +@@ -41,14 +41,14 @@ class TTFMetric (FontMetric): + + def readFontMetrics(self, ff): + version = ff.read(4) +- if map(ord, version) == [0, 1, 0, 0]: ++ if list(map(ord, version)) == [0, 1, 0, 0]: + self.fonttype="TTF" + elif version == "OTTO": + # self.fonttype="OTF" + # At the moment, I cannot parse bbox data out from CFF +- raise TTFFormatError, "OpenType/CFF fonts are unsupported" ++ raise TTFFormatError("OpenType/CFF fonts are unsupported") + else: +- raise TTFFormatError, "Not a TrueType file" ++ raise TTFFormatError("Not a TrueType file") + + numTables = readUnsigned(ff, 2) + tables = {} +@@ -61,15 +61,15 @@ class TTFMetric (FontMetric): + tables[tag] = (offset, length) + + def switchTable(tableTag): +- if tableTag not in tables.keys(): +- raise TTFFormatError, "Required table "+tableTag+" missing in TrueType file" ++ if tableTag not in list(tables.keys()): ++ raise TTFFormatError("Required table "+tableTag+" missing in TrueType file") + return tables[tableTag] + + (offset, 
length) = switchTable("head") + ff.seek(offset+12) + magic = readUnsigned(ff, 4) + if magic != 0x5F0F3CF5: +- raise TTFFormatError, "Magic number in 'head' table does not match the spec" ++ raise TTFFormatError("Magic number in 'head' table does not match the spec") + skip(ff, 2) + self.unitsPerEm = readUnsigned(ff, 2) + emScale = 1.0 / self.unitsPerEm +@@ -107,24 +107,24 @@ class TTFMetric (FontMetric): + nameOffset = readUnsigned(ff, 2) + + if platformID == 3 and encodingID == 1: +- if languageID in englishCodes or nameID not in uniNames.keys(): ++ if languageID in englishCodes or nameID not in list(uniNames.keys()): + uniNames[nameID] = (nameOffset, nameLength) + elif platformID == 1 and encodingID == 0: +- if languageID == 0 or nameID not in macNames.keys(): ++ if languageID == 0 or nameID not in list(macNames.keys()): + macNames[nameID] = (nameOffset, nameLength) + + def getName (code): +- if code in macNames.keys(): ++ if code in list(macNames.keys()): + (nameOffset, nameLength) = macNames[code] + ff.seek (storageOffset + nameOffset) + return ff.read(nameLength) + # FIXME: repair Mac encoding here +- elif code in uniNames.keys(): ++ elif code in list(uniNames.keys()): + (nameOffset, nameLength) = uniNames[code] + ff.seek (storageOffset + nameOffset) +- result = u"" ++ result = "" + for i in range (0, nameLength/2): +- result += unichr(readUnsigned(ff, 2)) ++ result += chr(readUnsigned(ff, 2)) + return result + + self.family = getName(1) +@@ -198,7 +198,7 @@ class TTFMetric (FontMetric): + encodingScheme = "Symbol" + subtableOffset = cmapEncodings.get((3, 0)) + if subtableOffset is None: +- raise TTFFormatError, "Cannot use font '%s': no known subtable in 'cmap' table" % self.fullname ++ raise TTFFormatError("Cannot use font '%s': no known subtable in 'cmap' table" % self.fullname) + else: + if self.log: + self.log.write("WARNING: font '%s' is a symbolic font - Unicode mapping may be unreliable\n" % self.fullname) +@@ -207,7 +207,7 @@ class TTFMetric 
(FontMetric): + + tableFormat = readUnsigned(ff, 2) + if tableFormat != 4: +- raise TTFFormatError, "Unsupported format in 'cmap' table: %d" % tableFormat ++ raise TTFFormatError("Unsupported format in 'cmap' table: %d" % tableFormat) + + subtableLength = readUnsigned(ff, 2) + skip (ff, 2) +@@ -264,7 +264,7 @@ class TTFMetric (FontMetric): + for i in range (0, self.numGlyphs+1): + glyphIndex.append(readUnsigned(ff, 4)) + else: +- raise TTFFormatError, "Invalid indexToLocFormat value (%d) in 'head' table" % str(self.indexToLocFormat) ++ raise TTFFormatError("Invalid indexToLocFormat value (%d) in 'head' table" % str(self.indexToLocFormat)) + + (offset, length) = switchTable("glyf") + for i in range (0, self.numGlyphs): +@@ -284,6 +284,6 @@ def main(): + if len (sys.argv) == 2: + TTFMetric(sys.argv[1], log=sys.stderr).dump() + else: +- print """Usage: TTF.py """ ++ print("""Usage: TTF.py """) + + if __name__ == "__main__": main() +--- svgmath/mathconfig.py.orig 2022-03-18 18:47:45 UTC ++++ svgmath/mathconfig.py +@@ -2,9 +2,9 @@ + + import os, sys + from xml import sax +-from fonts.afm import AFMMetric +-from fonts.ttf import TTFMetric +-from fonts.metric import FontFormatError ++from .fonts.afm import AFMMetric ++from .fonts.ttf import TTFMetric ++from .fonts.metric import FontFormatError + + class MathConfig(sax.ContentHandler): + """Configuration for MathML-to-SVG formatter. 
+@@ -26,47 +26,47 @@ class MathConfig(sax.ContentHandler): + parser.setContentHandler(self) + parser.setFeature(sax.handler.feature_namespaces, 0) + parser.parse(configfile) +- except sax.SAXException, xcpt: +- print "Error parsing configuration file ", configfile, ": ", xcpt.getMessage() ++ except sax.SAXException as xcpt: ++ print("Error parsing configuration file ", configfile, ": ", xcpt.getMessage()) + sys.exit(1) + + + def startElement(self, name, attributes): +- if name == u"config": +- self.verbose = (attributes.get(u"verbose") == u"true") +- self.debug = (attributes.get(u"debug", u"")).replace(u",", u" ").split() ++ if name == "config": ++ self.verbose = (attributes.get("verbose") == "true") ++ self.debug = (attributes.get("debug", "")).replace(",", " ").split() + +- elif name == u"defaults": ++ elif name == "defaults": + self.defaults.update(attributes) + +- elif name == u"fallback": +- familyattr = attributes.get(u"family", u"") +- self.fallbackFamilies = [" ".join(x.split()) for x in familyattr.split(u",")] ++ elif name == "fallback": ++ familyattr = attributes.get("family", "") ++ self.fallbackFamilies = [" ".join(x.split()) for x in familyattr.split(",")] + +- elif name == u"family": +- self.currentFamily = attributes.get(u"name", u"") ++ elif name == "family": ++ self.currentFamily = attributes.get("name", "") + self.currentFamily = "".join(self.currentFamily.lower().split()) + +- elif name == u"font": +- weight = attributes.get(u"weight", u"normal") +- style = attributes.get(u"style", u"normal") ++ elif name == "font": ++ weight = attributes.get("weight", "normal") ++ style = attributes.get("style", "normal") + fontfullname = self.currentFamily; +- if weight != u"normal": +- fontfullname += u" " + weight +- if style != u"normal": +- fontfullname += u" " + style ++ if weight != "normal": ++ fontfullname += " " + weight ++ if style != "normal": ++ fontfullname += " " + style + try: +- if u"afm" in attributes.keys(): +- fontpath = 
attributes.get(u"afm") +- metric = AFMMetric(fontpath, attributes.get(u"glyph-list"), sys.stderr) +- elif u"ttf" in attributes.keys(): +- fontpath = attributes.get(u"ttf") ++ if "afm" in list(attributes.keys()): ++ fontpath = attributes.get("afm") ++ metric = AFMMetric(fontpath, attributes.get("glyph-list"), sys.stderr) ++ elif "ttf" in list(attributes.keys()): ++ fontpath = attributes.get("ttf") + metric = TTFMetric(fontpath, sys.stderr) + else: + sys.stderr.write("Bad record in configuration file: font is neither AFM nor TTF\n") + sys.stderr.write("Font entry for '%s' ignored\n" % fontfullname) + return +- except FontFormatError, err: ++ except FontFormatError as err: + sys.stderr.write("Invalid or unsupported file format in '%s': %s\n" % (fontpath, err.message)) + sys.stderr.write("Font entry for '%s' ignored\n" % fontfullname) + return +@@ -76,41 +76,41 @@ class MathConfig(sax.ContentHandler): + sys.stderr.write("Font entry for '%s' ignored\n" % fontfullname) + return + +- self.fonts[weight+u" "+style+u" "+self.currentFamily] = metric ++ self.fonts[weight+" "+style+" "+self.currentFamily] = metric + +- elif name == u"mathvariant": +- variantattr = attributes.get(u"name") +- familyattr = attributes.get(u"family", "") +- splitFamily = [" ".join(x.split()) for x in familyattr.split(u",")] +- weightattr = attributes.get(u"weight", u"normal") +- styleattr = attributes.get(u"style", u"normal") ++ elif name == "mathvariant": ++ variantattr = attributes.get("name") ++ familyattr = attributes.get("family", "") ++ splitFamily = [" ".join(x.split()) for x in familyattr.split(",")] ++ weightattr = attributes.get("weight", "normal") ++ styleattr = attributes.get("style", "normal") + self.variants[variantattr] = (weightattr, styleattr, splitFamily) + +- elif name == u"operator-style": +- opname = attributes.get(u"operator") ++ elif name == "operator-style": ++ opname = attributes.get("operator") + if opname: + styling = {} + styling.update(attributes) +- del 
styling[u"operator"] ++ del styling["operator"] + self.opstyles[opname] = styling + else: + sys.stderr.write("Bad record in configuration file: operator-style with no operator attribute\n") + + + def endElement(self, name): +- if name == u"family": ++ if name == "family": + self.currentFamily = None + + + def findfont(self, weight, style, family): + """Finds a metric for family+weight+style.""" +- weight = (weight or u"normal").strip() +- style = (style or u"normal").strip() +- family = "".join((family or u"").lower().split()) ++ weight = (weight or "normal").strip() ++ style = (style or "normal").strip() ++ family = "".join((family or "").lower().split()) + +- for w in [weight, u"normal"]: +- for s in [style, u"normal"]: +- metric = self.fonts.get(w+u" "+s+u" "+family) ++ for w in [weight, "normal"]: ++ for s in [style, "normal"]: ++ metric = self.fonts.get(w+" "+s+" "+family) + if metric: return metric + return None + +@@ -121,20 +121,20 @@ def main(): + else: + config = MathConfig(sys.argv[1]) + +- print "Options: verbose =", config.verbose, " debug =", config.debug +- print "Fonts:" +- for (font, metric) in config.fonts.items(): +- print " ", font, "-->", metric.fontname +- print "Math variants:" +- for (variant, value) in config.variants.items(): +- print " ", variant, "-->", value +- print "Defaults:" +- for (attr, value) in config.defaults.items(): +- print " ", attr, "=", value +- print "Operator styling:" +- for (opname, value) in config.opstyles.items(): +- print " ", repr(opname), ":", value +- print "Fallback font families:", config.fallbackFamilies ++ print("Options: verbose =", config.verbose, " debug =", config.debug) ++ print("Fonts:") ++ for (font, metric) in list(config.fonts.items()): ++ print(" ", font, "-->", metric.fontname) ++ print("Math variants:") ++ for (variant, value) in list(config.variants.items()): ++ print(" ", variant, "-->", value) ++ print("Defaults:") ++ for (attr, value) in list(config.defaults.items()): ++ print(" ", attr, 
"=", value) ++ print("Operator styling:") ++ for (opname, value) in list(config.opstyles.items()): ++ print(" ", repr(opname), ":", value) ++ print("Fallback font families:", config.fallbackFamilies) + + + if __name__ == "__main__": main() diff --git a/math/py-topologic/Makefile b/math/py-topologic/Makefile index 6b4bd28035d..9e2a33a2e38 100644 --- a/math/py-topologic/Makefile +++ b/math/py-topologic/Makefile @@ -29,7 +29,7 @@ PLIST_FILES= ${PYTHON_SITELIBDIR}/topologic${PYTHON_EXT_SUFFIX}.so do-install: # workaround for https://github.com/wassimj/Topologic/issues/34 ${MKDIR} ${STAGEDIR}${PYTHONPREFIX_SITELIBDIR} - ${INSTALL_LIB} ${BUILD_WRKSRC}/topologic.cpython-38.so ${STAGEDIR}${PYTHONPREFIX_SITELIBDIR} + ${INSTALL_LIB} ${BUILD_WRKSRC}/topologic${PYTHON_EXT_SUFFIX}.so ${STAGEDIR}${PYTHONPREFIX_SITELIBDIR} do-test: install ${PYTHON_CMD} ${WRKSRC}/test/topologictest01.py diff --git a/math/qalculate-gtk/Makefile b/math/qalculate-gtk/Makefile index 568420c50fb..1ca26f05204 100644 --- a/math/qalculate-gtk/Makefile +++ b/math/qalculate-gtk/Makefile @@ -2,6 +2,7 @@ PORTNAME= qalculate-gtk PORTVERSION= 4.1.0 +PORTREVISION= 1 CATEGORIES= math MASTER_SITES= https://github.com/Qalculate/${PORTNAME}/releases/download/v${PORTVERSION}/ diff --git a/math/sage/Makefile b/math/sage/Makefile index eb689a7dc06..89e2fe290cc 100644 --- a/math/sage/Makefile +++ b/math/sage/Makefile @@ -19,6 +19,8 @@ COMMENT= Open source Mathematics software LICENSE= GPLv3 LICENSE_FILE= ${WRKSRC}/COPYING.txt +BROKEN= hoping to fix it with Python-3.9 + BUILD_DEPENDS= bash:shells/bash \ dash:shells/dash \ cmake:devel/cmake \ diff --git a/math/sc-im/Makefile b/math/sc-im/Makefile index 35697c528c5..8a972d35c29 100644 --- a/math/sc-im/Makefile +++ b/math/sc-im/Makefile @@ -1,7 +1,7 @@ PORTNAME= sc-im DISTVERSIONPREFIX= v DISTVERSION= 0.8.2 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= math MAINTAINER= bapt@FreeBSD.org diff --git a/math/scilab/Makefile b/math/scilab/Makefile index 29f9a523c3d..a41e6cccad0 
100644 --- a/math/scilab/Makefile +++ b/math/scilab/Makefile @@ -2,6 +2,7 @@ PORTNAME= scilab PORTVERSION= 6.1.1 +PORTREVISION= 1 CATEGORIES= math cad java MASTER_SITES= https://cgit.scilab.org/scilab/snapshot/ diff --git a/math/stan/Makefile b/math/stan/Makefile index 3fe02f0afd7..cd832a05f52 100644 --- a/math/stan/Makefile +++ b/math/stan/Makefile @@ -1,6 +1,6 @@ PORTNAME= stan DISTVERSIONPREFIX= v -DISTVERSION= 2.29.1 +DISTVERSION= 2.29.2 CATEGORIES= math MAINTAINER= yuri@FreeBSD.org diff --git a/math/stan/distinfo b/math/stan/distinfo index b98a163e745..f0e6639eb3b 100644 --- a/math/stan/distinfo +++ b/math/stan/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646585229 -SHA256 (stan-dev-stan-v2.29.1_GH0.tar.gz) = 1f6151aea60a9177305adae07f119bbec42a3c38cda4ce136c4da07cc0b571df -SIZE (stan-dev-stan-v2.29.1_GH0.tar.gz) = 3930440 +TIMESTAMP = 1648347549 +SHA256 (stan-dev-stan-v2.29.2_GH0.tar.gz) = cb9f11c9ab1ce3322891330430574c5f47166ba6f7e5d615ed705a9453c36e8c +SIZE (stan-dev-stan-v2.29.2_GH0.tar.gz) = 3930651 diff --git a/math/stanmath/Makefile b/math/stanmath/Makefile index 895789ef29b..0207203b4d6 100644 --- a/math/stanmath/Makefile +++ b/math/stanmath/Makefile @@ -1,6 +1,6 @@ PORTNAME= stanmath DISTVERSIONPREFIX= v -DISTVERSION= 4.2.0 +DISTVERSION= 4.3.2 CATEGORIES= math MAINTAINER= yuri@FreeBSD.org diff --git a/math/stanmath/distinfo b/math/stanmath/distinfo index 783d940c35f..73bb508a653 100644 --- a/math/stanmath/distinfo +++ b/math/stanmath/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634484296 -SHA256 (stan-dev-math-v4.2.0_GH0.tar.gz) = 5201f94bef40c59496b86e29ba41285a7196e436c551c92c5d22ade2558e30c6 -SIZE (stan-dev-math-v4.2.0_GH0.tar.gz) = 28245840 +TIMESTAMP = 1648349477 +SHA256 (stan-dev-math-v4.3.2_GH0.tar.gz) = 9efbb1a9c40b5484b971a278bfef1b9fba3f0390af8d0d81928a820bb91a6781 +SIZE (stan-dev-math-v4.3.2_GH0.tar.gz) = 29376012 diff --git a/math/stanmath/pkg-plist b/math/stanmath/pkg-plist index 3bf30e281e0..86e738c1528 100644 --- a/math/stanmath/pkg-plist +++ 
b/math/stanmath/pkg-plist @@ -66,6 +66,7 @@ include/stan/math/fwd/fun/inc_beta.hpp include/stan/math/fwd/fun/inv.hpp include/stan/math/fwd/fun/inv_Phi.hpp include/stan/math/fwd/fun/inv_cloglog.hpp +include/stan/math/fwd/fun/inv_erfc.hpp include/stan/math/fwd/fun/inv_logit.hpp include/stan/math/fwd/fun/inv_sqrt.hpp include/stan/math/fwd/fun/inv_square.hpp @@ -77,6 +78,7 @@ include/stan/math/fwd/fun/lbeta.hpp include/stan/math/fwd/fun/ldexp.hpp include/stan/math/fwd/fun/lgamma.hpp include/stan/math/fwd/fun/lmgamma.hpp +include/stan/math/fwd/fun/lmultiply.hpp include/stan/math/fwd/fun/log.hpp include/stan/math/fwd/fun/log10.hpp include/stan/math/fwd/fun/log1m.hpp @@ -106,6 +108,8 @@ include/stan/math/fwd/fun/multiply.hpp include/stan/math/fwd/fun/multiply_log.hpp include/stan/math/fwd/fun/multiply_lower_tri_self_transpose.hpp include/stan/math/fwd/fun/norm.hpp +include/stan/math/fwd/fun/norm1.hpp +include/stan/math/fwd/fun/norm2.hpp include/stan/math/fwd/fun/owens_t.hpp include/stan/math/fwd/fun/polar.hpp include/stan/math/fwd/fun/pow.hpp @@ -226,6 +230,7 @@ include/stan/math/opencl/kernels/device_functions/inv_square.hpp include/stan/math/opencl/kernels/device_functions/lbeta.hpp include/stan/math/opencl/kernels/device_functions/lgamma_stirling.hpp include/stan/math/opencl/kernels/device_functions/lgamma_stirling_diff.hpp +include/stan/math/opencl/kernels/device_functions/lmultiply.hpp include/stan/math/opencl/kernels/device_functions/log1m.hpp include/stan/math/opencl/kernels/device_functions/log1m_exp.hpp include/stan/math/opencl/kernels/device_functions/log1m_inv_logit.hpp @@ -508,6 +513,7 @@ include/stan/math/opencl/rev/lb_constrain.hpp include/stan/math/opencl/rev/lbeta.hpp include/stan/math/opencl/rev/ldexp.hpp include/stan/math/opencl/rev/lgamma.hpp +include/stan/math/opencl/rev/lmultiply.hpp include/stan/math/opencl/rev/log.hpp include/stan/math/opencl/rev/log10.hpp include/stan/math/opencl/rev/log1m.hpp @@ -692,6 +698,7 @@ include/stan/math/prim/fun/asin.hpp 
include/stan/math/prim/fun/asinh.hpp include/stan/math/prim/fun/assign.hpp include/stan/math/prim/fun/atan.hpp +include/stan/math/prim/fun/atan2.hpp include/stan/math/prim/fun/atanh.hpp include/stan/math/prim/fun/autocorrelation.hpp include/stan/math/prim/fun/autocovariance.hpp @@ -814,6 +821,7 @@ include/stan/math/prim/fun/int_step.hpp include/stan/math/prim/fun/inv.hpp include/stan/math/prim/fun/inv_Phi.hpp include/stan/math/prim/fun/inv_cloglog.hpp +include/stan/math/prim/fun/inv_erfc.hpp include/stan/math/prim/fun/inv_logit.hpp include/stan/math/prim/fun/inv_sqrt.hpp include/stan/math/prim/fun/inv_square.hpp @@ -843,6 +851,7 @@ include/stan/math/prim/fun/linspaced_int_array.hpp include/stan/math/prim/fun/linspaced_row_vector.hpp include/stan/math/prim/fun/linspaced_vector.hpp include/stan/math/prim/fun/lmgamma.hpp +include/stan/math/prim/fun/lmultiply.hpp include/stan/math/prim/fun/log.hpp include/stan/math/prim/fun/log10.hpp include/stan/math/prim/fun/log1m.hpp @@ -906,6 +915,8 @@ include/stan/math/prim/fun/multiply.hpp include/stan/math/prim/fun/multiply_log.hpp include/stan/math/prim/fun/multiply_lower_tri_self_transpose.hpp include/stan/math/prim/fun/norm.hpp +include/stan/math/prim/fun/norm1.hpp +include/stan/math/prim/fun/norm2.hpp include/stan/math/prim/fun/num_elements.hpp include/stan/math/prim/fun/offset_multiplier_constrain.hpp include/stan/math/prim/fun/offset_multiplier_free.hpp @@ -1311,6 +1322,7 @@ include/stan/math/prim/prob/logistic_log.hpp include/stan/math/prim/prob/logistic_lpdf.hpp include/stan/math/prim/prob/logistic_rng.hpp include/stan/math/prim/prob/loglogistic_cdf.hpp +include/stan/math/prim/prob/loglogistic_log.hpp include/stan/math/prim/prob/loglogistic_lpdf.hpp include/stan/math/prim/prob/loglogistic_rng.hpp include/stan/math/prim/prob/lognormal_ccdf_log.hpp @@ -1479,7 +1491,9 @@ include/stan/math/prim/prob/uniform_lcdf.hpp include/stan/math/prim/prob/uniform_log.hpp include/stan/math/prim/prob/uniform_lpdf.hpp 
include/stan/math/prim/prob/uniform_rng.hpp +include/stan/math/prim/prob/von_mises_ccdf_log.hpp include/stan/math/prim/prob/von_mises_cdf.hpp +include/stan/math/prim/prob/von_mises_cdf_log.hpp include/stan/math/prim/prob/von_mises_lccdf.hpp include/stan/math/prim/prob/von_mises_lcdf.hpp include/stan/math/prim/prob/von_mises_log.hpp @@ -1616,6 +1630,7 @@ include/stan/math/rev/fun/cov_exp_quad.hpp include/stan/math/rev/fun/cov_matrix_constrain.hpp include/stan/math/rev/fun/cov_matrix_constrain_lkj.hpp include/stan/math/rev/fun/csr_matrix_times_vector.hpp +include/stan/math/rev/fun/cumulative_sum.hpp include/stan/math/rev/fun/determinant.hpp include/stan/math/rev/fun/diag_post_multiply.hpp include/stan/math/rev/fun/diag_pre_multiply.hpp @@ -1660,6 +1675,7 @@ include/stan/math/rev/fun/initialize_variable.hpp include/stan/math/rev/fun/inv.hpp include/stan/math/rev/fun/inv_Phi.hpp include/stan/math/rev/fun/inv_cloglog.hpp +include/stan/math/rev/fun/inv_erfc.hpp include/stan/math/rev/fun/inv_logit.hpp include/stan/math/rev/fun/inv_sqrt.hpp include/stan/math/rev/fun/inv_square.hpp @@ -1673,6 +1689,7 @@ include/stan/math/rev/fun/lbeta.hpp include/stan/math/rev/fun/ldexp.hpp include/stan/math/rev/fun/lgamma.hpp include/stan/math/rev/fun/lmgamma.hpp +include/stan/math/rev/fun/lmultiply.hpp include/stan/math/rev/fun/log.hpp include/stan/math/rev/fun/log10.hpp include/stan/math/rev/fun/log1m.hpp @@ -1706,6 +1723,8 @@ include/stan/math/rev/fun/multiply.hpp include/stan/math/rev/fun/multiply_log.hpp include/stan/math/rev/fun/multiply_lower_tri_self_transpose.hpp include/stan/math/rev/fun/norm.hpp +include/stan/math/rev/fun/norm1.hpp +include/stan/math/rev/fun/norm2.hpp include/stan/math/rev/fun/ordered_constrain.hpp include/stan/math/rev/fun/owens_t.hpp include/stan/math/rev/fun/polar.hpp @@ -1772,13 +1791,13 @@ include/stan/math/rev/functor/coupled_ode_system.hpp include/stan/math/rev/functor/cvodes_integrator.hpp include/stan/math/rev/functor/cvodes_integrator_adjoint.hpp 
include/stan/math/rev/functor/cvodes_utils.hpp +include/stan/math/rev/functor/dae.hpp +include/stan/math/rev/functor/dae_system.hpp include/stan/math/rev/functor/finite_diff_hessian_auto.hpp include/stan/math/rev/functor/gradient.hpp -include/stan/math/rev/functor/idas_forward_system.hpp include/stan/math/rev/functor/idas_integrator.hpp -include/stan/math/rev/functor/idas_system.hpp +include/stan/math/rev/functor/idas_service.hpp include/stan/math/rev/functor/integrate_1d.hpp -include/stan/math/rev/functor/integrate_dae.hpp include/stan/math/rev/functor/integrate_ode_adams.hpp include/stan/math/rev/functor/integrate_ode_bdf.hpp include/stan/math/rev/functor/jacobian.hpp diff --git a/math/tablix/Makefile b/math/tablix/Makefile index f3b4cdb18f8..263acc50e36 100644 --- a/math/tablix/Makefile +++ b/math/tablix/Makefile @@ -2,7 +2,7 @@ PORTNAME= tablix2 PORTVERSION= 0.3.5 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= math www MASTER_SITES= http://www.tablix.org/releases/stable/ \ http://my.samba.org/ diff --git a/math/timbl/Makefile b/math/timbl/Makefile index f7922f55e49..60ccb5160cc 100644 --- a/math/timbl/Makefile +++ b/math/timbl/Makefile @@ -3,6 +3,7 @@ PORTNAME= timbl DISTVERSIONPREFIX= v DISTVERSION= 6.6 +PORTREVISION= 1 CATEGORIES= math education MAINTAINER= yuri@FreeBSD.org diff --git a/misc/clifm/Makefile b/misc/clifm/Makefile index 7753c3f2404..22726e28433 100644 --- a/misc/clifm/Makefile +++ b/misc/clifm/Makefile @@ -1,7 +1,7 @@ # Created by: Alexey Dokuchaev PORTNAME= clifm -PORTVERSION= 1.4 +PORTVERSION= 1.5.1 DISTVERSIONPREFIX= v CATEGORIES= misc diff --git a/misc/clifm/distinfo b/misc/clifm/distinfo index aaa09e7a87f..5b841484fc2 100644 --- a/misc/clifm/distinfo +++ b/misc/clifm/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1642405518 -SHA256 (leo-arch-clifm-v1.4_GH0.tar.gz) = d6d8776ce14060912cfd04b6fdc9b04b032944ed58588954f21309ce13bf4f32 -SIZE (leo-arch-clifm-v1.4_GH0.tar.gz) = 1169233 +TIMESTAMP = 1648330915 +SHA256 (leo-arch-clifm-v1.5.1_GH0.tar.gz) = 
c615fbf52a0f701b3c509437d3edf0bbdcb6645712f08cdf745fac38b23a087b +SIZE (leo-arch-clifm-v1.5.1_GH0.tar.gz) = 1217409 SHA256 (leo-arch-clifm-colors-v0.1.0_GH0.tar.gz) = 147573aa1e83c286488fdf45c3673e6b417957fb3c5a7f70eba0021cc1f3aa70 SIZE (leo-arch-clifm-colors-v0.1.0_GH0.tar.gz) = 875324 diff --git a/misc/clifm/files/patch-Makefile b/misc/clifm/files/patch-Makefile index cfa245403e5..3c635f8452a 100644 --- a/misc/clifm/files/patch-Makefile +++ b/misc/clifm/files/patch-Makefile @@ -4,8 +4,8 @@ SRC = $(SRCDIR)/*.c HEADERS = $(SRCDIR)/*.h --CFLAGS ?= -O3 -fstack-protector-strong -march=native -Wall -+CFLAGS += -Wall +-CFLAGS ?= -O3 -fstack-protector-strong -march=native -Wall -DCLIFM_DATADIR=$(DATADIR) ++CFLAGS += -Wall -DCLIFM_DATADIR=$(DATADIR) LIBS_Linux ?= -lreadline -lacl -lcap -lmagic -LIBS_FreeBSD ?= -I/usr/local/include -L/usr/local/lib -lreadline -lintl -lmagic +LIBS_FreeBSD ?= -I$(LOCALBASE)/include -L$(LOCALBASE)/lib -lreadline -lintl -lmagic diff --git a/misc/copperspice-examples/Makefile b/misc/copperspice-examples/Makefile index d392accee2e..263c0243a68 100644 --- a/misc/copperspice-examples/Makefile +++ b/misc/copperspice-examples/Makefile @@ -1,7 +1,7 @@ PORTNAME= copperspice-examples DISTVERSIONPREFIX= ks- DISTVERSION= 1.7.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= misc MASTER_SITES= https://download.copperspice.com/kitchensink/source/ DISTNAME= KitchenSink-${DISTVERSION} diff --git a/misc/freebsd-doc-all/Makefile b/misc/freebsd-doc-all/Makefile index cfcc04c21b0..fbfa867abd0 100644 --- a/misc/freebsd-doc-all/Makefile +++ b/misc/freebsd-doc-all/Makefile @@ -1,7 +1,7 @@ # Created by: Marc Fonvieille PORTNAME= freebsd-doc-all -PORTVERSION= 20220311 +PORTVERSION= 20220326 PORTEPOCH= 1 CATEGORIES= misc docs diff --git a/misc/freebsd-doc-en/Makefile b/misc/freebsd-doc-en/Makefile index 553033a321c..e70d7ce63c0 100644 --- a/misc/freebsd-doc-en/Makefile +++ b/misc/freebsd-doc-en/Makefile @@ -1,7 +1,7 @@ # Created by: Marc Fonvieille PORTNAME= freebsd-doc 
-PORTVERSION= 20220311 +PORTVERSION= 20220326 PORTEPOCH= 1 CATEGORIES= misc docs MASTER_SITES= LOCAL/blackend diff --git a/misc/freebsd-doc-en/distinfo b/misc/freebsd-doc-en/distinfo index 87ef7ca3900..6a3ac1d12ac 100644 --- a/misc/freebsd-doc-en/distinfo +++ b/misc/freebsd-doc-en/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647078475 -SHA256 (freebsd-doc-20220311.tar.gz) = e90161a999342c8bb6774416801ac218c439c4b3528a6fbbdcee453762a3d701 -SIZE (freebsd-doc-20220311.tar.gz) = 97556551 +TIMESTAMP = 1648292882 +SHA256 (freebsd-doc-20220326.tar.gz) = a277b3ae156ff16fd203c2bfedd79a875bd6a0e8586c9fc743dab5a5a6fd9748 +SIZE (freebsd-doc-20220326.tar.gz) = 97658560 diff --git a/misc/gimp-help-en/Makefile b/misc/gimp-help-en/Makefile index e517ea03577..6919702012d 100644 --- a/misc/gimp-help-en/Makefile +++ b/misc/gimp-help-en/Makefile @@ -2,6 +2,7 @@ PORTNAME= gimp-help PORTVERSION= 2.10.0 +PORTREVISION= 1 CATEGORIES?= misc graphics MASTER_SITES= http://gimp.mirrors.hoobly.com/pub/%SUBDIR%/ \ http://ftp.gwdg.de/pub/misc/grafik/%SUBDIR%/ \ diff --git a/misc/hotkeys/Makefile b/misc/hotkeys/Makefile index d3393281963..9593a7ac721 100644 --- a/misc/hotkeys/Makefile +++ b/misc/hotkeys/Makefile @@ -2,7 +2,7 @@ PORTNAME= hotkeys PORTVERSION= 0.5.7.1 -PORTREVISION= 18 +PORTREVISION= 19 CATEGORIES= misc MASTER_SITES= https://BSDforge.com/projects/source/misc/hotkeys/${PORTVERSION}/ DISTNAME= hotkeys_${PORTVERSION} diff --git a/misc/iio-oscilloscope/Makefile b/misc/iio-oscilloscope/Makefile index 9e44f6b30d9..11b8d3b5825 100644 --- a/misc/iio-oscilloscope/Makefile +++ b/misc/iio-oscilloscope/Makefile @@ -1,6 +1,7 @@ PORTNAME= iio-oscilloscope DISTVERSIONPREFIX= v DISTVERSION= 0.14 +PORTREVISION= 1 DISTVERSIONSUFFIX= -master CATEGORIES= misc diff --git a/misc/libcomps/Makefile b/misc/libcomps/Makefile index 590a69cd1b8..133085bb10b 100644 --- a/misc/libcomps/Makefile +++ b/misc/libcomps/Makefile @@ -1,5 +1,6 @@ PORTNAME= libcomps DISTVERSION= 0.1.17 +PORTREVISION= 1 CATEGORIES= misc 
MAINTAINER= yuri@FreeBSD.org diff --git a/misc/libiio/Makefile b/misc/libiio/Makefile index f6fab5d6932..325967474e5 100644 --- a/misc/libiio/Makefile +++ b/misc/libiio/Makefile @@ -1,6 +1,7 @@ PORTNAME= libiio DISTVERSIONPREFIX= v DISTVERSION= 0.23 +PORTREVISION= 1 CATEGORIES= misc MAINTAINER= yuri@FreeBSD.org diff --git a/misc/liblxi/Makefile b/misc/liblxi/Makefile index a24d8044558..63fd48f9b25 100644 --- a/misc/liblxi/Makefile +++ b/misc/liblxi/Makefile @@ -1,6 +1,7 @@ PORTNAME= liblxi DISTVERSIONPREFIX= v DISTVERSION= 1.13 +PORTREVISION= 1 CATEGORIES= misc MAINTAINER= dmgk@FreeBSD.org diff --git a/misc/librepo/Makefile b/misc/librepo/Makefile index af412900634..acc1fe4be69 100644 --- a/misc/librepo/Makefile +++ b/misc/librepo/Makefile @@ -1,5 +1,6 @@ PORTNAME= librepo DISTVERSION= 1.14.2 +PORTREVISION= 1 CATEGORIES= misc MAINTAINER= yuri@FreeBSD.org diff --git a/misc/libsupertone/Makefile b/misc/libsupertone/Makefile index 7ea94b25c77..5fa335b7d3a 100644 --- a/misc/libsupertone/Makefile +++ b/misc/libsupertone/Makefile @@ -2,7 +2,7 @@ PORTNAME= libsupertone PORTVERSION= 0.0.2 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= misc MASTER_SITES= http://www.soft-switch.org/downloads/unicall/ diff --git a/misc/parley/Makefile b/misc/parley/Makefile index 980fc2ab64b..61a8b910d9a 100644 --- a/misc/parley/Makefile +++ b/misc/parley/Makefile @@ -1,5 +1,6 @@ PORTNAME= parley DISTVERSION= ${KDE_APPLICATIONS_VERSION} +PORTREVISION= 1 CATEGORIES= misc education kde kde-applications MAINTAINER= kde@FreeBSD.org diff --git a/misc/py-QSpectrumAnalyzer/Makefile b/misc/py-QSpectrumAnalyzer/Makefile index 1402edace9d..d8e599a99df 100644 --- a/misc/py-QSpectrumAnalyzer/Makefile +++ b/misc/py-QSpectrumAnalyzer/Makefile @@ -15,7 +15,7 @@ RUN_DEPENDS= ${PYNUMPY} \ ${PYTHON_PKGNAMEPREFIX}pyqtgraph>=0.10.0:graphics/py-pyqtgraph@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}soapy_power>=1.5.0:misc/py-soapy_power@${PY_FLAVOR} -USES= fortran python:3.4+ pyqt:5 +USES= fortran python:3.8+ pyqt:5 
USE_PYQT= pyqt5 USE_PYTHON= distutils concurrent autoplist diff --git a/misc/py-alive-progress/Makefile b/misc/py-alive-progress/Makefile index 639c38020e1..1ed6c0c31c9 100644 --- a/misc/py-alive-progress/Makefile +++ b/misc/py-alive-progress/Makefile @@ -1,5 +1,5 @@ PORTNAME= alive-progress -DISTVERSION= 2.3.1 +DISTVERSION= 2.4.0 CATEGORIES= misc MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/misc/py-alive-progress/distinfo b/misc/py-alive-progress/distinfo index 0697b9fdc2e..d982cfd0748 100644 --- a/misc/py-alive-progress/distinfo +++ b/misc/py-alive-progress/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1644595318 -SHA256 (alive-progress-2.3.1.tar.gz) = 21b2808a25120e8c795115ca3f103c28bde89c3e8edb225736786a14e1c19f6b -SIZE (alive-progress-2.3.1.tar.gz) = 101091 +TIMESTAMP = 1648411502 +SHA256 (alive-progress-2.4.0.tar.gz) = 0f1191106a10ce91272b898638c6b272ef05234aac1a4cecaaa192e9ea38241d +SIZE (alive-progress-2.4.0.tar.gz) = 104603 diff --git a/misc/py-kartograph/files/patch-2to3 b/misc/py-kartograph/files/patch-2to3 new file mode 100644 index 00000000000..ea79cd778ed --- /dev/null +++ b/misc/py-kartograph/files/patch-2to3 @@ -0,0 +1,379 @@ +--- kartograph/cli.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/cli.py +@@ -6,7 +6,7 @@ command line interface for kartograph + import argparse + import os + import os.path +-from options import read_map_config ++from .options import read_map_config + import sys + + +@@ -37,7 +37,7 @@ parser.add_argument('--format', '-f', metavar='svg', h + parser.add_argument('--preview', '-p', nargs='?', metavar='', const=True, help='opens the generated svg for preview') + parser.add_argument('--pretty-print', '-P', dest='pretty_print', action='store_true', help='pretty print the svg file') + +-from kartograph import Kartograph ++from .kartograph import Kartograph + import time + import os + +@@ -74,7 +74,7 @@ def render_map(args): + # print str(r) + pass + +- except Exception, e: ++ except Exception as e: + 
print_error(e) + exit(-1) + +@@ -98,17 +98,17 @@ def main(): + + try: + args = parser.parse_args() +- except IOError, e: ++ except IOError as e: + # parser.print_help() + sys.stderr.write('\n' + str(e) + '\n') +- except Exception, e: ++ except Exception as e: + parser.print_help() +- print '\nError:', e ++ print('\nError:', e) + else: + args.func(args) + elapsed = (time.time() - start) + if args.output != '-': +- print 'execution time: %.3f secs' % elapsed ++ print('execution time: %.3f secs' % elapsed) + + sys.exit(0) + +--- kartograph/kartograph.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/kartograph.py +@@ -1,11 +1,11 @@ + +-from options import parse_options ++from .options import parse_options + from shapely.geometry import Polygon, LineString, MultiPolygon +-from errors import * ++from .errors import * + from copy import deepcopy +-from renderer import SvgRenderer +-from mapstyle import MapStyle +-from map import Map ++from .renderer import SvgRenderer ++from .mapstyle import MapStyle ++from .map import Map + import os + + +@@ -64,14 +64,14 @@ class Kartograph(object): + command = commands[sys.platform] + else: + sys.stderr.write('don\'t know how to preview SVGs on your system. Try setting the KARTOGRAPH_PREVIEW environment variable.') +- print renderer ++ print(renderer) + return + renderer.preview(command) + # Write the map to a file or return the renderer instance. 
+ if outfile is None: + return renderer + elif outfile == '-': +- print renderer ++ print(renderer) + else: + renderer.write(outfile) + else: +--- kartograph/layersource/postgislayer.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/layersource/postgislayer.py +@@ -1,5 +1,5 @@ + +-from layersource import LayerSource ++from .layersource import LayerSource + from kartograph.errors import * + from kartograph.geometry import create_feature + import shapely.wkb +@@ -72,11 +72,11 @@ class PostGISLayer(LayerSource): + if fields[f] != self.geom_col: + # but ignore null values + if rec[f]: +- if isinstance(rec[f], (str, unicode)): ++ if isinstance(rec[f], str): + try: + meta[fields[f]] = rec[f].decode('utf-8') + except: +- print 'decoding error', fields[f], rec[f] ++ print('decoding error', fields[f], rec[f]) + meta[fields[f]] = '--decoding error--' + else: + meta[fields[f]] = rec[f] +@@ -84,7 +84,7 @@ class PostGISLayer(LayerSource): + # Store geometry + geom_wkb = rec[f] + +- if filter is None or filter(meta): ++ if filter is None or list(filter(meta)): + # construct geometry + geom = shapely.wkb.loads(geom_wkb.decode('hex')) + # Finally we construct the map feature and append it to the +--- kartograph/layersource/shplayer.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/layersource/shplayer.py +@@ -1,11 +1,11 @@ + +-from layersource import LayerSource ++from .layersource import LayerSource + from kartograph.errors import * + from kartograph.geometry import BBox, create_feature + from os.path import exists + from osgeo.osr import SpatialReference + import pyproj +-import shapefile ++from . 
import shapefile + + + verbose = False +@@ -20,7 +20,7 @@ class ShapefileLayer(LayerSource): + """ + initialize shapefile reader + """ +- if isinstance(src, unicode): ++ if isinstance(src, str): + src = src.encode('ascii', 'ignore') + src = self.find_source(src) + self.shpSrc = src +@@ -93,7 +93,7 @@ class ShapefileLayer(LayerSource): + for j in range(len(self.attributes)): + drec[self.attributes[j]] = self.recs[i][j] + # For each record that is not filtered.. +- if filter is None or filter(drec): ++ if filter is None or list(filter(drec)): + props = {} + # ..we try to decode the attributes (shapefile charsets are arbitrary) + for j in range(len(self.attributes)): +@@ -107,10 +107,10 @@ class ShapefileLayer(LayerSource): + break + except: + if verbose: +- print 'warning: could not decode "%s" to %s' % (val, enc) ++ print('warning: could not decode "%s" to %s' % (val, enc)) + if not decoded: + raise KartographError('having problems to decode the input data "%s"' % val) +- if isinstance(val, (str, unicode)): ++ if isinstance(val, str): + val = val.strip() + props[self.attributes[j]] = val + +@@ -129,7 +129,7 @@ class ShapefileLayer(LayerSource): + feature = create_feature(geom, props) + res.append(feature) + if bbox is not None and ignored > 0 and verbose: +- print "-ignoring %d shapes (not in bounds %s )" % (ignored, bbox) ++ print("-ignoring %d shapes (not in bounds %s )" % (ignored, bbox)) + return res + + # # shape2geometry +--- kartograph/map.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/map.py +@@ -1,11 +1,11 @@ + from shapely.geometry import Polygon + from shapely.geometry.base import BaseGeometry +-from maplayer import MapLayer +-from geometry.utils import geom_to_bbox +-from geometry import BBox, View +-from proj import projections +-from filter import filter_record +-from errors import KartographError ++from .maplayer import MapLayer ++from .geometry.utils import geom_to_bbox ++from .geometry import BBox, View ++from .proj import projections ++from 
.filter import filter_record ++from .errors import KartographError + import sys + + # Map +@@ -154,7 +154,7 @@ class Map(object): + ### Initialize bounding polygons and bounding box + ### Compute the projected bounding box + """ +- from geometry.utils import bbox_to_polygon ++ from .geometry.utils import bbox_to_polygon + + opts = self.options + proj = self.proj +@@ -306,7 +306,7 @@ class Map(object): + """ + ### Simplify geometries + """ +- from simplify import create_point_store, simplify_lines ++ from .simplify import create_point_store, simplify_lines + + # We will use a glocal point cache for all layers. If the + # same point appears in more than one layer, it will be +@@ -421,7 +421,7 @@ class Map(object): + a single feature. Kartograph uses the geometry.union() method of shapely + to do that. + """ +- from geometry.utils import join_features ++ from .geometry.utils import join_features + + for layer in self.layers: + if layer.options['join'] is not False: +@@ -517,7 +517,7 @@ class Map(object): + for feat in groupFeatures[g_id]: + exp[g_id].append(feat.props[join['export-ids']]) + import json +- print json.dumps(exp) ++ print(json.dumps(exp)) + + layer.features = res + +--- kartograph/proj/__init__.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/proj/__init__.py +@@ -18,8 +18,8 @@ + + projections = dict() + +-from base import Proj +-from cylindrical import * ++from .base import Proj ++from .cylindrical import * + + projections['lonlat'] = Equirectangular + projections['cea'] = CEA +@@ -30,7 +30,7 @@ projections['balthasart'] = Balthasart + projections['mercator'] = Mercator + projections['ll'] = LonLat + +-from pseudocylindrical import * ++from .pseudocylindrical import * + + projections['naturalearth'] = NaturalEarth + projections['robinson'] = Robinson +@@ -47,7 +47,7 @@ projections['aitoff'] = Aitoff + projections['winkel3'] = Winkel3 + projections['nicolosi'] = Nicolosi + +-from azimuthal import * ++from .azimuthal import * + + projections['ortho'] = 
Orthographic + projections['laea'] = LAEA +@@ -58,11 +58,11 @@ projections['satellite'] = Satellite + projections['eda'] = EquidistantAzimuthal + projections['aitoff'] = Aitoff + +-from conic import * ++from .conic import * + + projections['lcc'] = LCC + +-from proj4 import Proj4 ++from .proj4 import Proj4 + + projections['proj4'] = Proj4 + +@@ -78,7 +78,7 @@ if __name__ == '__main__': + #assert (round(x,2),round(y,2)) == (3962799.45, -2999718.85), 'LAEA proj error' + from kartograph.geometry import BBox + +- print Proj.fromXML(Robinson(lat0=3, lon0=4).toXML(), projections) ++ print(Proj.fromXML(Robinson(lat0=3, lon0=4).toXML(), projections)) + + Robinson(lat0=3, lon0=4) + +@@ -87,10 +87,10 @@ if __name__ == '__main__': + bbox = BBox() + try: + proj = Proj(lon0=60) +- print proj.project(0, 0) +- print proj.world_bounds(bbox) +- print proj.toXML() ++ print(proj.project(0, 0)) ++ print(proj.world_bounds(bbox)) ++ print(proj.toXML()) + except: +- print 'Error', pj +- print sys.exc_info()[0] ++ print('Error', pj) ++ print(sys.exc_info()[0]) + raise +--- kartograph/proj/azimuthal/azimuthal.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/proj/azimuthal/azimuthal.py +@@ -62,7 +62,7 @@ class Azimuthal(Proj): + def sea_shape(self, llbbox=(-180, -90, 180, 90)): + out = [] + if llbbox == (-180, -90, 180, 90) or llbbox == [-180, -90, 180, 90]: +- print "-> full extend" ++ print("-> full extend") + for phi in range(0, 360): + x = self.r + math.cos(math.radians(phi)) * self.r + y = self.r + math.sin(math.radians(phi)) * self.r +--- kartograph/renderer/svg.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/renderer/svg.py +@@ -239,7 +239,7 @@ class SvgRenderer(MapRenderer): + + key = labelOpts['key'] + if not key: +- key = feature.props.keys()[0] ++ key = list(feature.props.keys())[0] + if key not in feature.props: + #sys.stderr.write('could not find feature property "%s" for labeling\n' % key) + return +@@ -411,7 +411,7 @@ class SvgDocument(object): + # Here we finally write the 
SVG file, and we're brave enough + # to try to write it in Unicode. + def write(self, outfile, pretty_print=False): +- if isinstance(outfile, (str, unicode)): ++ if isinstance(outfile, str): + outfile = open(outfile, 'w') + if pretty_print: + raw = self.doc.toprettyxml('utf-8') +@@ -420,7 +420,7 @@ class SvgDocument(object): + try: + raw = raw.encode('utf-8') + except: +- print 'warning: could not encode to unicode' ++ print('warning: could not encode to unicode') + + outfile.write(raw) + outfile.close() +@@ -431,7 +431,7 @@ class SvgDocument(object): + import tempfile + tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', delete=False) + self.write(tmpfile, pretty_print) +- print 'map stored to', tmpfile.name ++ print('map stored to', tmpfile.name) + from subprocess import call + call([command, tmpfile.name]) + +--- kartograph/yaml_ordered_dict.py.orig 2014-03-27 03:57:55 UTC ++++ kartograph/yaml_ordered_dict.py +@@ -19,8 +19,8 @@ class OrderedDictYAMLLoader(yaml.Loader): + def __init__(self, *args, **kwargs): + yaml.Loader.__init__(self, *args, **kwargs) + +- self.add_constructor(u'tag:yaml.org,2002:map', type(self).construct_yaml_map) +- self.add_constructor(u'tag:yaml.org,2002:omap', type(self).construct_yaml_map) ++ self.add_constructor('tag:yaml.org,2002:map', type(self).construct_yaml_map) ++ self.add_constructor('tag:yaml.org,2002:omap', type(self).construct_yaml_map) + + def construct_yaml_map(self, node): + data = OrderedDict() +@@ -40,7 +40,7 @@ class OrderedDictYAMLLoader(yaml.Loader): + key = self.construct_object(key_node, deep=deep) + try: + hash(key) +- except TypeError, exc: ++ except TypeError as exc: + raise yaml.constructor.ConstructorError('while constructing a mapping', + node.start_mark, 'found unacceptable key (%s)' % exc, key_node.start_mark) + value = self.construct_object(value_node, deep=deep) +@@ -63,4 +63,4 @@ two: + + data = yaml.load(textwrap.dedent(sample), OrderedDictYAMLLoader) + assert type(data) is OrderedDict +- print data +\ 
No newline at end of file ++ print(data) diff --git a/misc/py-onnx-tf/Makefile b/misc/py-onnx-tf/Makefile index 531ac896e83..51da4bdf589 100644 --- a/misc/py-onnx-tf/Makefile +++ b/misc/py-onnx-tf/Makefile @@ -1,5 +1,5 @@ PORTNAME= onnx-tf -DISTVERSION= 1.6.0 +DISTVERSION= 1.10.0 CATEGORIES= misc python # machine-learning MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/misc/py-onnx-tf/distinfo b/misc/py-onnx-tf/distinfo index 086659fcd47..e3ff076e36c 100644 --- a/misc/py-onnx-tf/distinfo +++ b/misc/py-onnx-tf/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1607028857 -SHA256 (onnx-tf-1.6.0.tar.gz) = 040f56d5de801346d1b054e4c120caf236c56008320de1c7ba48d65c13f911e4 -SIZE (onnx-tf-1.6.0.tar.gz) = 113534 +TIMESTAMP = 1648229628 +SHA256 (onnx-tf-1.10.0.tar.gz) = eee80c72a921dc5345cddd030001b65855264d659594b1d180b2958405e10269 +SIZE (onnx-tf-1.10.0.tar.gz) = 143616 diff --git a/misc/py-onnx-tf/files/patch-setup.py b/misc/py-onnx-tf/files/patch-setup.py new file mode 100644 index 00000000000..e526839a97d --- /dev/null +++ b/misc/py-onnx-tf/files/patch-setup.py @@ -0,0 +1,13 @@ +- same patch as suggested here: https://github.com/onnx/onnx-tensorflow/issues/1016 + +--- setup.py.orig 2022-03-25 17:32:07 UTC ++++ setup.py +@@ -30,7 +30,7 @@ setuptools.setup( + author='Arpith Jacob, Tian Jin, Gheorghe-Teodor Bercea, Wenhao Hu', + author_email='tian.jin1@ibm.com', + license='Apache License 2.0', +- packages=setuptools.find_packages(), ++ packages=setuptools.find_packages(exclude=['test*']), + zip_safe=False, + classifiers=[ + "Programming Language :: Python :: 2", diff --git a/misc/py-soapy_power/Makefile b/misc/py-soapy_power/Makefile index 23aa62e2c82..2305c1cab0e 100644 --- a/misc/py-soapy_power/Makefile +++ b/misc/py-soapy_power/Makefile @@ -16,7 +16,7 @@ RUN_DEPENDS= ${PYNUMPY} \ ${PYTHON_PKGNAMEPREFIX}SimpleSoapy>=1.5.0:misc/py-SimpleSoapy@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}SimpleSpectral>0:science/py-SimpleSpectral@${PY_FLAVOR} -USES= python:3.4+ 
+USES= python:3.8+ USE_PYTHON= distutils concurrent autoplist .include diff --git a/misc/py-tqdm/Makefile b/misc/py-tqdm/Makefile index 37773c3e1c9..6492a21ab15 100644 --- a/misc/py-tqdm/Makefile +++ b/misc/py-tqdm/Makefile @@ -1,5 +1,5 @@ PORTNAME= tqdm -PORTVERSION= 4.62.3 +PORTVERSION= 4.63.0 CATEGORIES= misc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/misc/py-tqdm/distinfo b/misc/py-tqdm/distinfo index 33f279b6f0a..007a7d6baa2 100644 --- a/misc/py-tqdm/distinfo +++ b/misc/py-tqdm/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632664358 -SHA256 (tqdm-4.62.3.tar.gz) = d359de7217506c9851b7869f3708d8ee53ed70a1b8edbba4dbcb47442592920d -SIZE (tqdm-4.62.3.tar.gz) = 167952 +TIMESTAMP = 1647264610 +SHA256 (tqdm-4.63.0.tar.gz) = 1d9835ede8e394bb8c9dcbffbca02d717217113adc679236873eeaac5bc0b3cd +SIZE (tqdm-4.63.0.tar.gz) = 168556 diff --git a/misc/py-xgboost/Makefile b/misc/py-xgboost/Makefile index 33321c2507a..ecce53c6355 100644 --- a/misc/py-xgboost/Makefile +++ b/misc/py-xgboost/Makefile @@ -35,6 +35,9 @@ POST_PLIST= fix-plist fix-plist: # https://github.com/dmlc/xgboost/issues/5705 @${REINPLACE_CMD} 's|.*libxgboost${PYTHON_EXT_SUFFIX}.so$$||' ${TMPPLIST} +post-install: + ${PYTHON_CMD} -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} + do-test: # tests fail w/out CUDA: https://github.com/dmlc/xgboost/issues/6881 @cd ${WRKSRC}/.. 
&& ${PYTHON_CMD} -m pytest diff --git a/misc/py-xgboost/files/patch-2to3 b/misc/py-xgboost/files/patch-2to3 new file mode 100644 index 00000000000..54eac41ed21 --- /dev/null +++ b/misc/py-xgboost/files/patch-2to3 @@ -0,0 +1,375 @@ +--- xgboost/callback.py.orig 2022-01-17 08:52:31 UTC ++++ xgboost/callback.py +@@ -319,7 +319,7 @@ def _aggcv(rlist): + cvmap[(metric_idx, k)].append(float(v)) + msg = idx + results = [] +- for (metric_idx, k), v in sorted(cvmap.items(), key=lambda x: x[0][0]): ++ for (metric_idx, k), v in sorted(list(cvmap.items()), key=lambda x: x[0][0]): + v = numpy.array(v) + if not isinstance(msg, STRING_TYPES): + msg = msg.decode() +@@ -595,10 +595,10 @@ class EarlyStopping(TrainingCallback): + evals_log: TrainingCallback.EvalsLog) -> bool: + epoch += self.starting_round # training continuation + msg = 'Must have at least 1 validation dataset for early stopping.' +- assert len(evals_log.keys()) >= 1, msg ++ assert len(list(evals_log.keys())) >= 1, msg + data_name = '' + if self.data: +- for d, _ in evals_log.items(): ++ for d, _ in list(evals_log.items()): + if d == self.data: + data_name = d + if not data_name: +@@ -672,8 +672,8 @@ class EvaluationMonitor(TrainingCallback): + + msg: str = f'[{epoch}]' + if rabit.get_rank() == self.printer_rank: +- for data, metric in evals_log.items(): +- for metric_name, log in metric.items(): ++ for data, metric in list(evals_log.items()): ++ for metric_name, log in list(metric.items()): + stdv: Optional[float] = None + if isinstance(log[-1], tuple): + score = log[-1][0] +--- xgboost/compat.py.orig 2022-01-17 08:52:31 UTC ++++ xgboost/compat.py +@@ -48,14 +48,14 @@ except ImportError: + + # sklearn + try: +- from sklearn.base import BaseEstimator +- from sklearn.base import RegressorMixin, ClassifierMixin +- from sklearn.preprocessing import LabelEncoder ++ from .sklearn.base import BaseEstimator ++ from .sklearn.base import RegressorMixin, ClassifierMixin ++ from .sklearn.preprocessing import LabelEncoder + + 
try: +- from sklearn.model_selection import KFold, StratifiedKFold ++ from .sklearn.model_selection import KFold, StratifiedKFold + except ImportError: +- from sklearn.cross_validation import KFold, StratifiedKFold ++ from .sklearn.cross_validation import KFold, StratifiedKFold + + SKLEARN_INSTALLED = True + +@@ -71,7 +71,7 @@ try: + def to_json(self): + '''Returns a JSON compatible dictionary''' + meta = {} +- for k, v in self.__dict__.items(): ++ for k, v in list(self.__dict__.items()): + if isinstance(v, np.ndarray): + meta[k] = v.tolist() + else: +@@ -82,7 +82,7 @@ try: + # pylint: disable=attribute-defined-outside-init + '''Load the encoder back from a JSON compatible dict.''' + meta = {} +- for k, v in doc.items(): ++ for k, v in list(doc.items()): + if k == 'classes_': + self.classes_ = np.array(v) + continue +--- xgboost/core.py.orig 2022-01-17 08:52:31 UTC ++++ xgboost/core.py +@@ -142,7 +142,7 @@ def _expect(expectations, got): + + def _log_callback(msg: bytes) -> None: + """Redirect logs from native library into Python console""" +- print(py_str(msg)) ++ print((py_str(msg))) + + + def _get_log_callback_func(): +@@ -479,7 +479,7 @@ def _deprecate_positional_args(f): + kwonly_args = [] + all_args = [] + +- for name, param in sig.parameters.items(): ++ for name, param in list(sig.parameters.items()): + if param.kind == Parameter.POSITIONAL_OR_KEYWORD: + all_args.append(name) + elif param.kind == Parameter.KEYWORD_ONLY: +@@ -1346,7 +1346,7 @@ class Booster(object): + def _configure_metrics(self, params: Union[Dict, List]) -> Union[Dict, List]: + if isinstance(params, dict) and 'eval_metric' in params \ + and isinstance(params['eval_metric'], list): +- params = dict((k, v) for k, v in params.items()) ++ params = dict((k, v) for k, v in list(params.items())) + eval_metrics = params['eval_metric'] + params.pop("eval_metric", None) + params = list(params.items()) +@@ -1577,7 +1577,7 @@ class Booster(object): + **kwargs + The attributes to set. 
Setting a value to None deletes an attribute. + """ +- for key, value in kwargs.items(): ++ for key, value in list(kwargs.items()): + if value is not None: + if not isinstance(value, STRING_TYPES): + raise ValueError("Set Attr only accepts string values") +@@ -1650,7 +1650,7 @@ class Booster(object): + value of the specified parameter, when params is str key + """ + if isinstance(params, Mapping): +- params = params.items() ++ params = list(params.items()) + elif isinstance(params, STRING_TYPES) and value is not None: + params = [(params, value)] + for key, val in params: +--- xgboost/dask.py.orig 2022-01-17 08:52:31 UTC ++++ xgboost/dask.py +@@ -49,9 +49,9 @@ from .sklearn import _cls_predict_proba + from .sklearn import XGBRanker + + if TYPE_CHECKING: +- from dask import dataframe as dd +- from dask import array as da +- import dask ++ from .dask import dataframe as dd ++ from .dask import array as da ++ from . import dask + import distributed + else: + dd = LazyLoader('dd', globals(), 'dask.dataframe') +@@ -152,7 +152,7 @@ def _start_tracker(n_workers: int) -> Dict[str, Any]: + + def _assert_dask_support() -> None: + try: +- import dask # pylint: disable=W0621,W0611 ++ from . import dask # pylint: disable=W0621,W0611 + except ImportError as e: + raise ImportError( + "Dask needs to be installed in order to use this module" +@@ -394,7 +394,7 @@ class DaskDMatrix: + # [(x0, x1, ..), (y0, y1, ..), ..] in delayed form + + # delay the zipped result +- parts = list(map(dask.delayed, zip(*parts))) # pylint: disable=no-member ++ parts = list(map(dask.delayed, list(zip(*parts)))) # pylint: disable=no-member + # At this point, the mental model should look like: + # [(x0, y0, ..), (x1, y1, ..), ..] 
in delayed form + +@@ -414,7 +414,7 @@ class DaskDMatrix: + + worker_map: Dict[str, "distributed.Future"] = defaultdict(list) + +- for key, workers in who_has.items(): ++ for key, workers in list(who_has.items()): + worker_map[next(iter(workers))].append(key_to_partition[key]) + + self.worker_map = worker_map +@@ -803,7 +803,7 @@ def _dmatrix_from_list_of_parts( + async def _get_rabit_args(n_workers: int, client: "distributed.Client") -> List[bytes]: + '''Get rabit context arguments from data distribution in DaskDMatrix.''' + env = await client.run_on_scheduler(_start_tracker, n_workers) +- rabit_args = [f"{k}={v}".encode() for k, v in env.items()] ++ rabit_args = [f"{k}={v}".encode() for k, v in list(env.items())] + return rabit_args + + # train and predict methods are supposed to be "functional", which meets the +@@ -930,7 +930,7 @@ async def _train_async( + + results = await client.gather(futures, asynchronous=True) + +- return list(filter(lambda ret: ret is not None, results))[0] ++ return list([ret for ret in results if ret is not None])[0] + + + def train( # pylint: disable=unused-argument +@@ -1579,7 +1579,7 @@ class DaskScikitLearnBase(XGBModel): + + def __getstate__(self) -> Dict: + this = self.__dict__.copy() +- if "_client" in this.keys(): ++ if "_client" in list(this.keys()): + del this["_client"] + return this + +@@ -1711,7 +1711,7 @@ class DaskXGBRegressor(DaskScikitLearnBase, XGBRegress + callbacks: Optional[List[TrainingCallback]] = None, + ) -> "DaskXGBRegressor": + _assert_dask_support() +- args = {k: v for k, v in locals().items() if k not in ("self", "__class__")} ++ args = {k: v for k, v in list(locals().items()) if k not in ("self", "__class__")} + return self._client_sync(self._fit_async, **args) + + +@@ -1814,7 +1814,7 @@ class DaskXGBClassifier(DaskScikitLearnBase, XGBClassi + callbacks: Optional[List[TrainingCallback]] = None + ) -> "DaskXGBClassifier": + _assert_dask_support() +- args = {k: v for k, v in locals().items() if k not in 
("self", "__class__")} ++ args = {k: v for k, v in list(locals().items()) if k not in ("self", "__class__")} + return self._client_sync(self._fit_async, **args) + + async def _predict_proba_async( +@@ -2002,7 +2002,7 @@ class DaskXGBRanker(DaskScikitLearnBase, XGBRankerMixI + callbacks: Optional[List[TrainingCallback]] = None + ) -> "DaskXGBRanker": + _assert_dask_support() +- args = {k: v for k, v in locals().items() if k not in ("self", "__class__")} ++ args = {k: v for k, v in list(locals().items()) if k not in ("self", "__class__")} + return self._client_sync(self._fit_async, **args) + + # FIXME(trivialfis): arguments differ due to additional parameters like group and qid. +@@ -2067,7 +2067,7 @@ class DaskXGBRFRegressor(DaskXGBRegressor): + callbacks: Optional[List[TrainingCallback]] = None + ) -> "DaskXGBRFRegressor": + _assert_dask_support() +- args = {k: v for k, v in locals().items() if k not in ("self", "__class__")} ++ args = {k: v for k, v in list(locals().items()) if k not in ("self", "__class__")} + _check_rf_callback(early_stopping_rounds, callbacks) + super().fit(**args) + return self +@@ -2131,7 +2131,7 @@ class DaskXGBRFClassifier(DaskXGBClassifier): + callbacks: Optional[List[TrainingCallback]] = None + ) -> "DaskXGBRFClassifier": + _assert_dask_support() +- args = {k: v for k, v in locals().items() if k not in ("self", "__class__")} ++ args = {k: v for k, v in list(locals().items()) if k not in ("self", "__class__")} + _check_rf_callback(early_stopping_rounds, callbacks) + super().fit(**args) + return self +--- xgboost/plotting.py.orig 2022-01-17 08:52:31 UTC ++++ xgboost/plotting.py +@@ -81,7 +81,7 @@ def plot_importance(booster, ax=None, height=0.2, + tuples = sorted(tuples, key=lambda x: x[1])[-max_num_features:] + else: + tuples = sorted(tuples, key=lambda x: x[1]) +- labels, values = zip(*tuples) ++ labels, values = list(zip(*tuples)) + + if ax is None: + _, ax = plt.subplots(1, 1) +@@ -177,13 +177,13 @@ def to_graphviz(booster, fmap='', 
num_trees=0, rankdir + # squash everything back into kwargs again for compatibility + parameters = 'dot' + extra = {} +- for key, value in kwargs.items(): ++ for key, value in list(kwargs.items()): + extra[key] = value + + if rankdir is not None: + kwargs['graph_attrs'] = {} + kwargs['graph_attrs']['rankdir'] = rankdir +- for key, value in extra.items(): ++ for key, value in list(extra.items()): + if kwargs.get("graph_attrs", None) is not None: + kwargs['graph_attrs'][key] = value + else: +--- xgboost/sklearn.py.orig 2022-01-17 08:52:31 UTC ++++ xgboost/sklearn.py +@@ -455,7 +455,7 @@ class XGBModel(XGBModelBase): + booster : a xgboost booster of underlying model + """ + if not self.__sklearn_is_fitted__(): +- from sklearn.exceptions import NotFittedError ++ from .sklearn.exceptions import NotFittedError + raise NotFittedError('need to call fit or load_model beforehand') + return self._Booster + +@@ -476,7 +476,7 @@ class XGBModel(XGBModelBase): + + # this concatenates kwargs into parameters, enabling `get_params` for + # obtaining parameters from keyword parameters. 
+- for key, value in params.items(): ++ for key, value in list(params.items()): + if hasattr(self, key): + setattr(self, key, value) + else: +@@ -526,14 +526,14 @@ class XGBModel(XGBModelBase): + internal = {} + while stack: + obj = stack.pop() +- for k, v in obj.items(): ++ for k, v in list(obj.items()): + if k.endswith('_param'): +- for p_k, p_v in v.items(): ++ for p_k, p_v in list(v.items()): + internal[p_k] = p_v + elif isinstance(v, dict): + stack.append(v) + +- for k, v in internal.items(): ++ for k, v in list(internal.items()): + if k in params and params[k] is None: + params[k] = parse_parameter(v) + except ValueError: +@@ -549,7 +549,7 @@ class XGBModel(XGBModelBase): + "enable_categorical" + } + filtered = {} +- for k, v in params.items(): ++ for k, v in list(params.items()): + if k not in wrapper_specific and not callable(v): + filtered[k] = v + return filtered +@@ -568,7 +568,7 @@ class XGBModel(XGBModelBase): + + def save_model(self, fname: Union[str, os.PathLike]) -> None: + meta = {} +- for k, v in self.__dict__.items(): ++ for k, v in list(self.__dict__.items()): + if k == '_le': + meta['_le'] = self._le.to_json() + continue +@@ -607,7 +607,7 @@ class XGBModel(XGBModelBase): + return + meta = json.loads(meta_str) + states = {} +- for k, v in meta.items(): ++ for k, v in list(meta.items()): + if k == '_le': + self._le = XGBoostLabelEncoder() + self._le.from_json(v) +@@ -660,7 +660,7 @@ class XGBModel(XGBModelBase): + + def _set_evaluation_result(self, evals_result: TrainingCallback.EvalsLog) -> None: + if evals_result: +- for val in evals_result.items(): ++ for val in list(evals_result.items()): + evals_result_key = list(val[1].keys())[0] + evals_result[val[0]][evals_result_key] = val[1][evals_result_key] + self.evals_result_ = evals_result +@@ -1455,7 +1455,7 @@ class XGBRFClassifier(XGBClassifier): + feature_weights: Optional[array_like] = None, + callbacks: Optional[List[TrainingCallback]] = None + ) -> "XGBRFClassifier": +- args = {k: v for k, v 
in locals().items() if k not in ("self", "__class__")} ++ args = {k: v for k, v in list(locals().items()) if k not in ("self", "__class__")} + _check_rf_callback(early_stopping_rounds, callbacks) + super().fit(**args) + return self +@@ -1526,7 +1526,7 @@ class XGBRFRegressor(XGBRegressor): + feature_weights: Optional[array_like] = None, + callbacks: Optional[List[TrainingCallback]] = None + ) -> "XGBRFRegressor": +- args = {k: v for k, v in locals().items() if k not in ("self", "__class__")} ++ args = {k: v for k, v in list(locals().items()) if k not in ("self", "__class__")} + _check_rf_callback(early_stopping_rounds, callbacks) + super().fit(**args) + return self +--- xgboost/training.py.orig 2022-01-17 08:52:31 UTC ++++ xgboost/training.py +@@ -452,7 +452,7 @@ def cv(params, dtrain, num_boost_round=10, nfold=3, st + if 'eval_metric' in params: + params['eval_metric'] = _metrics + else: +- params = dict((k, v) for k, v in params.items()) ++ params = dict((k, v) for k, v in list(params.items())) + + if (not metrics) and 'eval_metric' in params: + if isinstance(params['eval_metric'], list): +@@ -506,7 +506,7 @@ def cv(params, dtrain, num_boost_round=10, nfold=3, st + results[key + '-std'].append(std) + + if should_break: +- for k in results.keys(): # pylint: disable=consider-iterating-dictionary ++ for k in list(results.keys()): # pylint: disable=consider-iterating-dictionary + results[k] = results[k][:(booster.best_iteration + 1)] + break + if as_pandas: diff --git a/misc/rubygem-octicons/Makefile b/misc/rubygem-octicons/Makefile index 06a20602ab8..dbedd77421d 100644 --- a/misc/rubygem-octicons/Makefile +++ b/misc/rubygem-octicons/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= octicons -PORTVERSION= 16.3.1 +PORTVERSION= 17.0.0 CATEGORIES= misc rubygems MASTER_SITES= RG diff --git a/misc/rubygem-octicons/distinfo b/misc/rubygem-octicons/distinfo index 565cafd6da3..450d4b3a5e6 100644 --- a/misc/rubygem-octicons/distinfo +++ 
b/misc/rubygem-octicons/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058838 -SHA256 (rubygem/octicons-16.3.1.gem) = 3321ac9530bf70c691d6cfb6980e64e4e92e5497f2a730bc198186c611d38e11 -SIZE (rubygem/octicons-16.3.1.gem) = 158720 +TIMESTAMP = 1647264848 +SHA256 (rubygem/octicons-17.0.0.gem) = a0e314da01dcde84b946394765793c3aa819f0fcace3fcafc3fe482470245acf +SIZE (rubygem/octicons-17.0.0.gem) = 161280 diff --git a/misc/shared-mime-info/Makefile b/misc/shared-mime-info/Makefile index a76c9a38c9a..d11046785b6 100644 --- a/misc/shared-mime-info/Makefile +++ b/misc/shared-mime-info/Makefile @@ -1,8 +1,7 @@ # Created by: olgeni@FreeBSD.org PORTNAME= shared-mime-info -DISTVERSION= 2.0 -PORTREVISION= 2 +DISTVERSION= 2.2 CATEGORIES= misc gnome MAINTAINER= desktop@FreeBSD.org @@ -12,10 +11,9 @@ LICENSE= GPLv2 LICENSE_FILE= ${WRKSRC}/COPYING BUILD_DEPENDS= gmake:devel/gmake \ - itstool:textproc/itstool \ minixmlto:textproc/minixmlto -USES= gettext gnome localbase meson pathfix pkgconfig shebangfix \ +USES= gettext gnome localbase meson pathfix pkgconfig python:build shebangfix \ tar:xz trigger CONFIGURE_ARGS= -Dupdate-mimedb=false \ -Dxdgmime-path=${WRKSRC}/src/xdgmime @@ -24,10 +22,17 @@ USE_GITLAB= YES GL_SITE= https://gitlab.freedesktop.org/ GL_ACCOUNT= xdg GL_PROJECT= shared-mime-info -GL_COMMIT= ef58b2b2f7ad4070171c6e45e3b3764daa3ff2c1 -GL_TUPLE= xdg:xdgmime:6663a2288d11b37bc07f5a01b4b85dcd377787e1:xdgmime/src/xdgmime +GL_COMMIT= 13695c7225c2f525a435e72739c33ac32bbfbbb9 +GL_TUPLE= xdg:xdgmime:de283fc430460b9b3a7e61432a6d273cd64cb102:xdgmime/src/xdgmime bash_CMD= /bin/sh -SHEBANG_FILES= ${WRKSRC}/data/freedesktop_generate.sh +SHEBANG_FILES= ${WRKSRC}/tests/compare_iana.py \ + ${WRKSRC}/tests/mime-detection/pyside.py \ + ${WRKSRC}/tests/mime-detection/pyside.py \ + ${WRKSRC}/tests/mime-detection/test.py \ + ${WRKSRC}/tests/mime-detection/test.py3 \ + ${WRKSRC}/tests/mime-detection/test3.py \ + ${WRKSRC}/tests/test_case.py \ + ${WRKSRC}/tests/test_sub_class_ofs.py MIMESUBDIRS= 
application audio image inode message model multipart \ packages text video x-epoc x-content font @@ -38,6 +43,8 @@ post-patch: ${REINPLACE_CMD} 's|/usr/local|${LOCALBASE}|g' \ ${WRKSRC}/src/xdgmime/src/xdgmime.c \ ${WRKSRC}/src/update-mime-database.c + ${REINPLACE_CMD} 's|%%PYTHON_CMD%%|${PYTHON_CMD}|g' \ + ${WRKSRC}/data/freedesktop.org.xml.in pre-configure: cd ${WRKSRC}/src/xdgmime/src && ${SETENV} ${MAKE_ENV} ${GMAKE} diff --git a/misc/shared-mime-info/distinfo b/misc/shared-mime-info/distinfo index 95b29c3b956..b582e805425 100644 --- a/misc/shared-mime-info/distinfo +++ b/misc/shared-mime-info/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1617891556 -SHA256 (xdg-shared-mime-info-ef58b2b2f7ad4070171c6e45e3b3764daa3ff2c1_GL0.tar.gz) = 8ed0c0877d2f77663336f894899a69804779361fb44787e10392353381667ba7 -SIZE (xdg-shared-mime-info-ef58b2b2f7ad4070171c6e45e3b3764daa3ff2c1_GL0.tar.gz) = 6514756 -SHA256 (xdg-xdgmime-6663a2288d11b37bc07f5a01b4b85dcd377787e1_GL0.tar.gz) = dce208f8bc3fd2964893d209a3d0c78e839d0df9f10d4820bdca0ef5a7116710 -SIZE (xdg-xdgmime-6663a2288d11b37bc07f5a01b4b85dcd377787e1_GL0.tar.gz) = 32289 +TIMESTAMP = 1648442982 +SHA256 (xdg-shared-mime-info-13695c7225c2f525a435e72739c33ac32bbfbbb9_GL0.tar.gz) = 206169d8f3aa428040ae706e7693fa4aa1599e304856fb797f89fe737c93b961 +SIZE (xdg-shared-mime-info-13695c7225c2f525a435e72739c33ac32bbfbbb9_GL0.tar.gz) = 7067213 +SHA256 (xdg-xdgmime-de283fc430460b9b3a7e61432a6d273cd64cb102_GL0.tar.gz) = 9856934132a561c6c669b92839604b631ff92acdfe5876606290f785f86f865d +SIZE (xdg-xdgmime-de283fc430460b9b3a7e61432a6d273cd64cb102_GL0.tar.gz) = 32249 diff --git a/misc/shared-mime-info/files/patch-data_freedesktop.org.xml.in b/misc/shared-mime-info/files/patch-data_freedesktop.org.xml.in new file mode 100644 index 00000000000..ae802ea438e --- /dev/null +++ b/misc/shared-mime-info/files/patch-data_freedesktop.org.xml.in @@ -0,0 +1,10 @@ +--- data/freedesktop.org.xml.in.orig 2022-03-27 10:19:00 UTC ++++ data/freedesktop.org.xml.in +@@ -6638,6 
+6638,7 @@ --> + Python 3 script + + ++ + + + diff --git a/misc/shared-mime-info/files/patch-data_freedesktop__generate.sh b/misc/shared-mime-info/files/patch-data_freedesktop__generate.sh deleted file mode 100644 index 87d0432ad90..00000000000 --- a/misc/shared-mime-info/files/patch-data_freedesktop__generate.sh +++ /dev/null @@ -1,10 +0,0 @@ -https://gitlab.freedesktop.org/xdg/shared-mime-info/-/issues/170 - ---- data/freedesktop_generate.sh.orig 2021-10-19 15:24:23 UTC -+++ data/freedesktop_generate.sh -@@ -9,4 +9,4 @@ itstool \ - --its "${src_root}/data/its/shared-mime-info.its" \ - --join "${src_root}/data/freedesktop.org.xml.in" \ - -o "${build_root}/data/freedesktop.org.xml" \ -- "${build_root}/po/"*".gmo" -+ "${build_root}/po/"*"/LC_MESSAGES/"*".mo" diff --git a/misc/shared-mime-info/files/patch-meson.build b/misc/shared-mime-info/files/patch-meson.build new file mode 100644 index 00000000000..37f611011ac --- /dev/null +++ b/misc/shared-mime-info/files/patch-meson.build @@ -0,0 +1,11 @@ +--- meson.build.orig 2022-03-28 04:52:01 UTC ++++ meson.build +@@ -74,7 +74,7 @@ configure_file( + 'prefix': get_option('prefix'), + 'VERSION': meson.project_version() + }, +- install_dir: get_option('datadir') / 'pkgconfig' ++ install_dir: join_paths(get_option('prefix'), 'libdata/pkgconfig') + ) + + if get_option('update-mimedb') diff --git a/misc/tellico/Makefile b/misc/tellico/Makefile index c016098acf0..502f7e4d10c 100644 --- a/misc/tellico/Makefile +++ b/misc/tellico/Makefile @@ -2,6 +2,7 @@ PORTNAME= tellico DISTVERSION= 3.4.4 +PORTREVISION= 1 CATEGORIES= misc kde MASTER_SITES= http://tellico-project.org/files/ diff --git a/misc/visp/Makefile b/misc/visp/Makefile index e34559b618f..469a6847002 100644 --- a/misc/visp/Makefile +++ b/misc/visp/Makefile @@ -1,6 +1,6 @@ PORTNAME= visp DISTVERSION= 3.4.0 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= misc MASTER_SITES= https://visp-doc.inria.fr/download/releases/ diff --git a/misc/xfce4-weather-plugin/Makefile 
b/misc/xfce4-weather-plugin/Makefile index d2542ac0446..0d5abc14b8d 100644 --- a/misc/xfce4-weather-plugin/Makefile +++ b/misc/xfce4-weather-plugin/Makefile @@ -2,6 +2,7 @@ PORTNAME= xfce4-weather-plugin PORTVERSION= 0.11.0 +PORTREVISION= 1 CATEGORIES= misc xfce geography MASTER_SITES= XFCE/panel-plugins DIST_SUBDIR= xfce4 diff --git a/misc/xiphos/Makefile b/misc/xiphos/Makefile index 4a8a6986179..4db77648320 100644 --- a/misc/xiphos/Makefile +++ b/misc/xiphos/Makefile @@ -1,6 +1,6 @@ PORTNAME= xiphos DISTVERSION= 4.2.1 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= misc gnome # Don't use USE_GITHUB: the sources downloaded by USE_GITHUB cannot be # built as they miss the file ${WRKSRC}/cmake/source_version.txt. diff --git a/misc/zoneinfo/Makefile b/misc/zoneinfo/Makefile index e876e91ef38..5cf4907e4e6 100644 --- a/misc/zoneinfo/Makefile +++ b/misc/zoneinfo/Makefile @@ -1,7 +1,7 @@ # Created by: Edwin Groothuis PORTNAME= zoneinfo -DISTVERSION= 2021e +DISTVERSION= 2022a CATEGORIES= misc MASTER_SITES= https://data.iana.org/time-zones/releases/ \ ftp://ftp.iana.org/tz/releases/ diff --git a/misc/zoneinfo/distinfo b/misc/zoneinfo/distinfo index 8f532f1d90e..4662aa3c0ff 100644 --- a/misc/zoneinfo/distinfo +++ b/misc/zoneinfo/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634931576 -SHA256 (tzdata2021e.tar.gz) = 07ec42b737d0d3c6be9c337f8abb5f00554a0f9cc4fcf01a703d69403b6bb2b1 -SIZE (tzdata2021e.tar.gz) = 422509 +TIMESTAMP = 1648256207 +SHA256 (tzdata2022a.tar.gz) = ef7fffd9f4f50f4f58328b35022a32a5a056b245c5cb3d6791dddb342f871664 +SIZE (tzdata2022a.tar.gz) = 425833 diff --git a/multimedia/audacious-plugins/Makefile b/multimedia/audacious-plugins/Makefile index f474a46e495..44e7448155d 100644 --- a/multimedia/audacious-plugins/Makefile +++ b/multimedia/audacious-plugins/Makefile @@ -2,7 +2,7 @@ PORTNAME= audacious-plugins PORTVERSION= 4.1 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= multimedia audio MASTER_SITES= http://distfiles.audacious-media-player.org/ diff --git 
a/multimedia/avidemux-cli/Makefile b/multimedia/avidemux-cli/Makefile index d84136c6f8a..73b8ae7238a 100644 --- a/multimedia/avidemux-cli/Makefile +++ b/multimedia/avidemux-cli/Makefile @@ -1,6 +1,6 @@ PORTNAME= avidemux PORTVERSION= ${AVIDEMUX_VERSION} -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia PKGNAMESUFFIX= -cli diff --git a/multimedia/avidemux-plugins/Makefile b/multimedia/avidemux-plugins/Makefile index 6297b1b336c..dbd72face45 100644 --- a/multimedia/avidemux-plugins/Makefile +++ b/multimedia/avidemux-plugins/Makefile @@ -2,7 +2,7 @@ PORTNAME= avidemux PORTVERSION= ${AVIDEMUX_VERSION} -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= multimedia PKGNAMESUFFIX= -plugins diff --git a/multimedia/avidemux-qt5/Makefile b/multimedia/avidemux-qt5/Makefile index 1253dfbbefd..3185a3e13e0 100644 --- a/multimedia/avidemux-qt5/Makefile +++ b/multimedia/avidemux-qt5/Makefile @@ -1,6 +1,6 @@ PORTNAME= avidemux PORTVERSION= ${AVIDEMUX_VERSION} -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia PKGNAMESUFFIX= -qt5 diff --git a/multimedia/avidemux/Makefile b/multimedia/avidemux/Makefile index fd966701af0..8e5dc35dbb4 100644 --- a/multimedia/avidemux/Makefile +++ b/multimedia/avidemux/Makefile @@ -5,7 +5,7 @@ PORTNAME= avidemux PORTVERSION= ${AVIDEMUX_VERSION} -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= multimedia MAINTAINER= multimedia@FreeBSD.org diff --git a/multimedia/cineencoder/Makefile b/multimedia/cineencoder/Makefile index d8d4119fe06..3e83e4de369 100644 --- a/multimedia/cineencoder/Makefile +++ b/multimedia/cineencoder/Makefile @@ -1,7 +1,7 @@ # Created by: Alexey Dokuchaev PORTNAME= ${GH_ACCOUNT:tl} -PORTVERSION= 3.5.2 +PORTVERSION= 3.5.3 CATEGORIES= multimedia MAINTAINER= danfe@FreeBSD.org diff --git a/multimedia/cineencoder/distinfo b/multimedia/cineencoder/distinfo index 186c2845706..0bb5f04b6ff 100644 --- a/multimedia/cineencoder/distinfo +++ b/multimedia/cineencoder/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637323933 -SHA256 
(CineEncoder-cine-encoder-3.5.2_GH0.tar.gz) = 82b5bfe7fea307cdaf1274408d80ce3ea3f92646685df03d5d54dede5607bf5d -SIZE (CineEncoder-cine-encoder-3.5.2_GH0.tar.gz) = 1364708 +TIMESTAMP = 1648280195 +SHA256 (CineEncoder-cine-encoder-3.5.3_GH0.tar.gz) = 271ae1d1644581139e74982883f94c137730a5d54889f2de60c2e570892ae4ca +SIZE (CineEncoder-cine-encoder-3.5.3_GH0.tar.gz) = 1426263 diff --git a/multimedia/dvdauthor/Makefile b/multimedia/dvdauthor/Makefile index e3226137b54..26bc38ab7d2 100644 --- a/multimedia/dvdauthor/Makefile +++ b/multimedia/dvdauthor/Makefile @@ -2,6 +2,7 @@ PORTNAME= dvdauthor PORTVERSION= 0.7.2.20211105 +PORTREVISION= 1 CATEGORIES= multimedia MAINTAINER= ports@FreeBSD.org diff --git a/multimedia/ffmpeg/Makefile b/multimedia/ffmpeg/Makefile index 249ea0254d4..4726bd11e64 100644 --- a/multimedia/ffmpeg/Makefile +++ b/multimedia/ffmpeg/Makefile @@ -2,7 +2,7 @@ PORTNAME= ffmpeg PORTVERSION= 4.4.1 -PORTREVISION= 9 +PORTREVISION= 11 PORTEPOCH= 1 CATEGORIES= multimedia audio net MASTER_SITES= https://ffmpeg.org/releases/ diff --git a/multimedia/ffmpeg/files/patch-dav1d b/multimedia/ffmpeg/files/patch-dav1d new file mode 100644 index 00000000000..4af5b8b518e --- /dev/null +++ b/multimedia/ffmpeg/files/patch-dav1d @@ -0,0 +1,39 @@ +https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/61ed1182eeb2 +https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/cc33e73618a9 +https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/a4e1dd694014 + +--- libavcodec/libdav1d.c.orig 2021-10-24 20:47:11 UTC ++++ libavcodec/libdav1d.c +@@ -127,7 +127,11 @@ static av_cold int libdav1d_init(AVCodecContext *c) + { + Libdav1dContext *dav1d = c->priv_data; + Dav1dSettings s; ++#if FF_DAV1D_VERSION_AT_LEAST(6,0) ++ int threads = c->thread_count; ++#else + int threads = (c->thread_count ? 
c->thread_count : av_cpu_count()) * 3 / 2; ++#endif + int res; + + av_log(c, AV_LOG_INFO, "libdav1d %s\n", dav1d_version()); +@@ -153,7 +157,7 @@ static av_cold int libdav1d_init(AVCodecContext *c) + s.n_threads = FFMAX(dav1d->frame_threads, dav1d->tile_threads); + else + s.n_threads = FFMIN(threads, DAV1D_MAX_THREADS); +- s.max_frame_delay = (c->flags & AV_CODEC_FLAG_LOW_DELAY) ? 1 : s.n_threads; ++ s.max_frame_delay = (c->flags & AV_CODEC_FLAG_LOW_DELAY) ? 1 : 0; + av_log(c, AV_LOG_DEBUG, "Using %d threads, %d max_frame_delay\n", + s.n_threads, s.max_frame_delay); + #else +@@ -244,8 +248,10 @@ static int libdav1d_receive_frame(AVCodecContext *c, A + if (res < 0) { + if (res == AVERROR(EINVAL)) + res = AVERROR_INVALIDDATA; +- if (res != AVERROR(EAGAIN)) ++ if (res != AVERROR(EAGAIN)) { ++ dav1d_data_unref(data); + return res; ++ } + } + + res = dav1d_get_picture(dav1d->c, p); diff --git a/multimedia/ffmpeg/files/patch-svtav1 b/multimedia/ffmpeg/files/patch-svtav1 index e960c3d2f81..5cb3b47c42e 100644 --- a/multimedia/ffmpeg/files/patch-svtav1 +++ b/multimedia/ffmpeg/files/patch-svtav1 @@ -5,10 +5,14 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/c5f314309067 https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/c33b4048859a https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/a2b090da7932 https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 +https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/4e47ebf38b97 +https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/50bc87263576 +https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/d794b36a7788 +https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/51c0b9e829be --- configure.orig 2021-10-24 20:47:11 UTC +++ configure -@@ -6430,7 +6430,7 @@ enabled libsoxr && require libsoxr soxr.h so +@@ -6430,7 +6430,7 @@ enabled libsrt && require_pkg_config libsrt enabled libssh && require_pkg_config libssh libssh libssh/sftp.h sftp_init enabled libspeex && require_pkg_config libspeex speex speex/speex.h speex_decoder_init 
enabled libsrt && require_pkg_config libsrt "srt >= 1.3.0" srt/srt.h srt_socket @@ -19,7 +23,7 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 enabled libtheora && require libtheora theora/theoraenc.h th_info_init -ltheoraenc -ltheoradec -logg --- doc/encoders.texi.orig 2021-10-24 20:47:07 UTC +++ doc/encoders.texi -@@ -1754,28 +1754,15 @@ Set the operating point level. +@@ -1754,28 +1754,15 @@ Set the operating point tier. @item tier Set the operating point tier. @@ -51,7 +55,7 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 @item qp Set the quantizer used in cqp rate control mode (0-63). -@@ -1786,14 +1773,18 @@ Enable scene change detection. +@@ -1786,14 +1773,18 @@ Set number of frames to look ahead (0-120). Set number of frames to look ahead (0-120). @item preset @@ -74,44 +78,51 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 --- libavcodec/libsvtav1.c.orig 2021-10-24 20:47:07 UTC +++ libavcodec/libsvtav1.c -@@ -37,6 +37,10 @@ - #include "avcodec.h" - #include "profiles.h" - -+#ifndef SVT_AV1_CHECK_VERSION -+#define SVT_AV1_CHECK_VERSION(major, minor, patch) 0 -+#endif -+ - typedef enum eos_status { - EOS_NOT_REACHED = 0, - EOS_SENT, -@@ -60,10 +64,11 @@ typedef struct SvtContext { +@@ -60,17 +60,20 @@ typedef struct SvtContext { EOS_STATUS eos_flag; // User options. 
+ AVDictionary *svtav1_opts; ++#if FF_API_SVTAV1_OPTS int hierarchical_level; int la_depth; - int enc_mode; +- int enc_mode; - int rc_mode; -+ int crf; int scd; - int qp; +- int qp; -@@ -151,7 +156,63 @@ static int config_enc_params(EbSvtAv1EncConfiguration + int tier; + + int tile_columns; + int tile_rows; ++#endif ++ int enc_mode; ++ int crf; ++ int qp; + } SvtContext; + + static const struct { +@@ -151,7 +154,62 @@ static int config_enc_params(EbSvtAv1EncConfiguration { SvtContext *svt_enc = avctx->priv_data; const AVPixFmtDescriptor *desc; + AVDictionaryEntry *en = NULL; + // Update param from options ++#if FF_API_SVTAV1_OPTS + param->hierarchical_levels = svt_enc->hierarchical_level; ++ param->tier = svt_enc->tier; ++ param->scene_change_detection = svt_enc->scd; ++ param->tile_columns = svt_enc->tile_columns; ++ param->tile_rows = svt_enc->tile_rows; ++ ++ if (svt_enc->la_depth >= 0) ++ param->look_ahead_distance = svt_enc->la_depth; ++#endif + + if (svt_enc->enc_mode >= 0) + param->enc_mode = svt_enc->enc_mode; + -+ param->tier = svt_enc->tier; -+ + if (avctx->bit_rate) { + param->target_bit_rate = avctx->bit_rate; + if (avctx->rc_max_rate != avctx->bit_rate) @@ -125,19 +136,11 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 + if (svt_enc->crf > 0) { + param->qp = svt_enc->crf; + param->rate_control_mode = 0; -+ param->enable_tpl_la = 1; + } else if (svt_enc->qp > 0) { + param->qp = svt_enc->qp; + param->rate_control_mode = 0; -+ param->enable_tpl_la = 0; ++ param->enable_adaptive_quantization = 0; + } -+ param->scene_change_detection = svt_enc->scd; -+ -+ if (svt_enc->la_depth >= 0) -+ param->look_ahead_distance = svt_enc->la_depth; -+ -+ param->tile_columns = svt_enc->tile_columns; -+ param->tile_rows = svt_enc->tile_rows; + +#if SVT_AV1_CHECK_VERSION(0, 9, 1) + while ((en = av_dict_get(svt_enc->svtav1_opts, "", en, AV_DICT_IGNORE_SUFFIX))) { @@ -162,7 +165,24 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 
param->source_width = avctx->width; param->source_height = avctx->height; -@@ -184,16 +245,6 @@ static int config_enc_params(EbSvtAv1EncConfiguration +@@ -169,6 +227,16 @@ static int config_enc_params(EbSvtAv1EncConfiguration + return AVERROR(EINVAL); + } + ++ param->color_primaries = avctx->color_primaries; ++ param->matrix_coefficients = (desc->flags & AV_PIX_FMT_FLAG_RGB) ? ++ AVCOL_SPC_RGB : avctx->colorspace; ++ param->transfer_characteristics = avctx->color_trc; ++ ++ if (avctx->color_range != AVCOL_RANGE_UNSPECIFIED) ++ param->color_range = avctx->color_range == AVCOL_RANGE_JPEG; ++ else ++ param->color_range = !!(desc->flags & AV_PIX_FMT_FLAG_RGB); ++ + if (avctx->profile != FF_PROFILE_UNKNOWN) + param->profile = avctx->profile; + +@@ -184,16 +252,6 @@ static int config_enc_params(EbSvtAv1EncConfiguration param->profile = FF_PROFILE_AV1_HIGH; } @@ -179,7 +199,7 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 if (avctx->gop_size > 0) param->intra_period_length = avctx->gop_size - 1; -@@ -205,19 +256,15 @@ static int config_enc_params(EbSvtAv1EncConfiguration +@@ -205,19 +263,15 @@ static int config_enc_params(EbSvtAv1EncConfiguration param->frame_rate_denominator = avctx->time_base.num * avctx->ticks_per_frame; } @@ -203,18 +223,39 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 return 0; } -@@ -480,8 +527,8 @@ static const AVOption options[] = { - { "la_depth", "Look ahead distance [0, 120]", OFFSET(la_depth), - AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 120, VE }, +@@ -472,21 +526,22 @@ static const AVOption options[] = { + #define OFFSET(x) offsetof(SvtContext, x) + #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM + static const AVOption options[] = { +- { "hielevel", "Hierarchical prediction levels setting", OFFSET(hierarchical_level), +- AV_OPT_TYPE_INT, { .i64 = 4 }, 3, 4, VE , "hielevel"}, ++#if FF_API_SVTAV1_OPTS ++ { "hielevel", "Hierarchical prediction levels setting (Deprecated, use svtav1-params)", 
OFFSET(hierarchical_level), ++ AV_OPT_TYPE_INT, { .i64 = 4 }, 3, 4, VE | AV_OPT_FLAG_DEPRECATED , "hielevel"}, + { "3level", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 3 }, INT_MIN, INT_MAX, VE, "hielevel" }, + { "4level", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 4 }, INT_MIN, INT_MAX, VE, "hielevel" }, + +- { "la_depth", "Look ahead distance [0, 120]", OFFSET(la_depth), +- AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 120, VE }, ++ { "la_depth", "Look ahead distance [0, 120] (Deprecated, use svtav1-params)", OFFSET(la_depth), ++ AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 120, VE | AV_OPT_FLAG_DEPRECATED }, - { "preset", "Encoding preset [0, 8]", - OFFSET(enc_mode), AV_OPT_TYPE_INT, { .i64 = MAX_ENC_PRESET }, 0, MAX_ENC_PRESET, VE }, +- +- { "tier", "Set operating point tier", OFFSET(tier), +- AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE, "tier" }, ++ { "tier", "Set operating point tier (Deprecated, use svtav1-params)", OFFSET(tier), ++ AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE | AV_OPT_FLAG_DEPRECATED, "tier" }, + { "main", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, 0, 0, VE, "tier" }, + { "high", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, VE, "tier" }, ++#endif + { "preset", "Encoding preset", + OFFSET(enc_mode), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, MAX_ENC_PRESET, VE }, - { "tier", "Set operating point tier", OFFSET(tier), - AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE, "tier" }, -@@ -518,21 +565,19 @@ static const AVOption options[] = { + FF_AV1_PROFILE_OPTS + +@@ -518,21 +573,20 @@ static const AVOption options[] = { { LEVEL("7.3", 73) }, #undef LEVEL @@ -227,22 +268,27 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 + AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 63, VE }, + { "qp", "Initial Quantizer level value", OFFSET(qp), + AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 63, VE }, ++#if FF_API_SVTAV1_OPTS ++ { "sc_detection", "Scene change detection (Deprecated, use svtav1-params)", OFFSET(scd), ++ AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE | AV_OPT_FLAG_DEPRECATED }, - { "qp", "Quantizer to use 
with cqp rate control mode", OFFSET(qp), - AV_OPT_TYPE_INT, { .i64 = 50 }, 0, 63, VE }, -- - { "sc_detection", "Scene change detection", OFFSET(scd), - AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE }, - - { "tile_columns", "Log2 of number of tile columns to use", OFFSET(tile_columns), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 4, VE}, - { "tile_rows", "Log2 of number of tile rows to use", OFFSET(tile_rows), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 6, VE}, ++ { "tile_columns", "Log2 of number of tile columns to use (Deprecated, use svtav1-params)", OFFSET(tile_columns), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 4, VE | AV_OPT_FLAG_DEPRECATED }, ++ { "tile_rows", "Log2 of number of tile rows to use (Deprecated, use svtav1-params)", OFFSET(tile_rows), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 6, VE | AV_OPT_FLAG_DEPRECATED }, ++#endif +- { "sc_detection", "Scene change detection", OFFSET(scd), +- AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE }, + { "svtav1-params", "Set the SVT-AV1 configuration using a :-separated list of key=value parameters", OFFSET(svtav1_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE }, -+ + +- { "tile_columns", "Log2 of number of tile columns to use", OFFSET(tile_columns), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 4, VE}, +- { "tile_rows", "Log2 of number of tile rows to use", OFFSET(tile_rows), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 6, VE}, +- {NULL}, }; -@@ -544,9 +589,10 @@ static const AVClass class = { +@@ -544,9 +598,10 @@ static const AVCodecDefault eb_enc_defaults[] = { }; static const AVCodecDefault eb_enc_defaults[] = { @@ -255,7 +301,7 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 { "qmax", "63" }, { NULL }, }; -@@ -561,12 +607,11 @@ AVCodec ff_libsvtav1_encoder = { +@@ -561,12 +616,11 @@ AVCodec ff_libsvtav1_encoder = { .receive_packet = eb_receive_packet, .close = eb_enc_close, .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_OTHER_THREADS, @@ -269,3 +315,14 @@ https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/1dddb930aaf0 - .caps_internal = FF_CODEC_CAP_INIT_CLEANUP, 
.wrapper_name = "libsvtav1", }; +--- libavcodec/version.h.orig 2021-10-24 20:47:07 UTC ++++ libavcodec/version.h +@@ -168,5 +168,8 @@ + #ifndef FF_API_INIT_PACKET + #define FF_API_INIT_PACKET (LIBAVCODEC_VERSION_MAJOR < 60) + #endif ++#ifndef FF_API_SVTAV1_OPTS ++#define FF_API_SVTAV1_OPTS (LIBAVCODEC_VERSION_MAJOR < 60) ++#endif + + #endif /* AVCODEC_VERSION_H */ diff --git a/multimedia/gmerlin/Makefile b/multimedia/gmerlin/Makefile index 717cc88052d..c5d4417c34c 100644 --- a/multimedia/gmerlin/Makefile +++ b/multimedia/gmerlin/Makefile @@ -1,6 +1,6 @@ PORTNAME= gmerlin PORTVERSION= 1.2.0 -PORTREVISION= 19 +PORTREVISION= 20 CATEGORIES= multimedia MASTER_SITES= SF diff --git a/multimedia/gstreamer1-editing-services/Makefile b/multimedia/gstreamer1-editing-services/Makefile index 49fa5a4fbe4..328ced8f627 100644 --- a/multimedia/gstreamer1-editing-services/Makefile +++ b/multimedia/gstreamer1-editing-services/Makefile @@ -2,6 +2,7 @@ PORTNAME= gstreamer1-editing-services PORTVERSION= 1.16.2 +PORTREVISION= 1 CATEGORIES= multimedia MASTER_SITES= http://gstreamer.freedesktop.org/src/gstreamer-editing-services/ DISTNAME= gstreamer-editing-services-${PORTVERSION} diff --git a/multimedia/gstreamer1-plugins-aom/Makefile b/multimedia/gstreamer1-plugins-aom/Makefile index c8e7706aa7f..1f6903527e8 100644 --- a/multimedia/gstreamer1-plugins-aom/Makefile +++ b/multimedia/gstreamer1-plugins-aom/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 15 +PORTREVISION= 16 CATEGORIES= multimedia COMMENT= GStreamer (libaom) plugin diff --git a/multimedia/gstreamer1-plugins-assrender/Makefile b/multimedia/gstreamer1-plugins-assrender/Makefile index edd02823315..3855cf4aa39 100644 --- a/multimedia/gstreamer1-plugins-assrender/Makefile +++ b/multimedia/gstreamer1-plugins-assrender/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer ASS/SSA text renderer plugin diff --git a/multimedia/gstreamer1-plugins-bad/Makefile 
b/multimedia/gstreamer1-plugins-bad/Makefile index 9a33a7d5ca3..fb21c4f5170 100644 --- a/multimedia/gstreamer1-plugins-bad/Makefile +++ b/multimedia/gstreamer1-plugins-bad/Makefile @@ -1,6 +1,6 @@ # Created by: Michael Johnson -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer-plugins that need more quality, testing or documentation diff --git a/multimedia/gstreamer1-plugins-dash/Makefile b/multimedia/gstreamer1-plugins-dash/Makefile index e5f5d1b9fc3..65183981602 100644 --- a/multimedia/gstreamer1-plugins-dash/Makefile +++ b/multimedia/gstreamer1-plugins-dash/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer Dynamic Adaptive Streaming over HTTP demuxer plugin diff --git a/multimedia/gstreamer1-plugins-dts/Makefile b/multimedia/gstreamer1-plugins-dts/Makefile index 904cf3f44a8..49ccbedba34 100644 --- a/multimedia/gstreamer1-plugins-dts/Makefile +++ b/multimedia/gstreamer1-plugins-dts/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer dts audio decode plugin diff --git a/multimedia/gstreamer1-plugins-dv/Makefile b/multimedia/gstreamer1-plugins-dv/Makefile index c22d6affdcc..b98691d9201 100644 --- a/multimedia/gstreamer1-plugins-dv/Makefile +++ b/multimedia/gstreamer1-plugins-dv/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia PKGNAMESUFFIX= 1-plugins-dv diff --git a/multimedia/gstreamer1-plugins-dvdread/Makefile b/multimedia/gstreamer1-plugins-dvdread/Makefile index e40869c61e7..795a142d384 100644 --- a/multimedia/gstreamer1-plugins-dvdread/Makefile +++ b/multimedia/gstreamer1-plugins-dvdread/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= multimedia COMMENT= GStreamer DVD access plugin with libdvdread diff --git a/multimedia/gstreamer1-plugins-good/Makefile b/multimedia/gstreamer1-plugins-good/Makefile index 924add2272f..2f9d3241790 100644 --- 
a/multimedia/gstreamer1-plugins-good/Makefile +++ b/multimedia/gstreamer1-plugins-good/Makefile @@ -1,6 +1,6 @@ # Created by: Michael Johnson -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer-plugins good-quality plug-ins diff --git a/multimedia/gstreamer1-plugins-hls/Makefile b/multimedia/gstreamer1-plugins-hls/Makefile index ea0b8760b1c..e8dd09857a7 100644 --- a/multimedia/gstreamer1-plugins-hls/Makefile +++ b/multimedia/gstreamer1-plugins-hls/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia COMMENT= GStreamer HTTP Live Streaming (HLS) plugin diff --git a/multimedia/gstreamer1-plugins-kate/Makefile b/multimedia/gstreamer1-plugins-kate/Makefile index 8ee321608ae..2cc5c053a7a 100644 --- a/multimedia/gstreamer1-plugins-kate/Makefile +++ b/multimedia/gstreamer1-plugins-kate/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer kate codec plugin diff --git a/multimedia/gstreamer1-plugins-libde265/Makefile b/multimedia/gstreamer1-plugins-libde265/Makefile index 4e3520fb46c..09c23ab96e4 100644 --- a/multimedia/gstreamer1-plugins-libde265/Makefile +++ b/multimedia/gstreamer1-plugins-libde265/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer HEVC/H.265 decoder (libde265) plugin diff --git a/multimedia/gstreamer1-plugins-mpeg2dec/Makefile b/multimedia/gstreamer1-plugins-mpeg2dec/Makefile index c19cc792cb3..aadbd2a2915 100644 --- a/multimedia/gstreamer1-plugins-mpeg2dec/Makefile +++ b/multimedia/gstreamer1-plugins-mpeg2dec/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer mpeg decode plugin diff --git a/multimedia/gstreamer1-plugins-mpeg2enc/Makefile b/multimedia/gstreamer1-plugins-mpeg2enc/Makefile index 9ac26f64d57..80991330d7a 100644 --- a/multimedia/gstreamer1-plugins-mpeg2enc/Makefile +++ b/multimedia/gstreamer1-plugins-mpeg2enc/Makefile @@ -1,4 +1,4 @@ 
-PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer mpeg encoder plugin diff --git a/multimedia/gstreamer1-plugins-mplex/Makefile b/multimedia/gstreamer1-plugins-mplex/Makefile index 36607a8a159..37fe33200ef 100644 --- a/multimedia/gstreamer1-plugins-mplex/Makefile +++ b/multimedia/gstreamer1-plugins-mplex/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer mplex plugin diff --git a/multimedia/gstreamer1-plugins-msdk/Makefile b/multimedia/gstreamer1-plugins-msdk/Makefile index 5ddcd79698c..b475d676290 100644 --- a/multimedia/gstreamer1-plugins-msdk/Makefile +++ b/multimedia/gstreamer1-plugins-msdk/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer (Intel MediaSDK) plugin diff --git a/multimedia/gstreamer1-plugins-openh264/Makefile b/multimedia/gstreamer1-plugins-openh264/Makefile index 8d621be3e14..a4ebd822bf8 100644 --- a/multimedia/gstreamer1-plugins-openh264/Makefile +++ b/multimedia/gstreamer1-plugins-openh264/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia COMMENT= GStreamer (openh264) plugin diff --git a/multimedia/gstreamer1-plugins-resindvd/Makefile b/multimedia/gstreamer1-plugins-resindvd/Makefile index 4ab318edfed..b4eb187a339 100644 --- a/multimedia/gstreamer1-plugins-resindvd/Makefile +++ b/multimedia/gstreamer1-plugins-resindvd/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= multimedia COMMENT= GStreamer resindvd DVD playback plugin diff --git a/multimedia/gstreamer1-plugins-rtmp/Makefile b/multimedia/gstreamer1-plugins-rtmp/Makefile index 86a37c7f417..ca5490a6eaf 100644 --- a/multimedia/gstreamer1-plugins-rtmp/Makefile +++ b/multimedia/gstreamer1-plugins-rtmp/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer RTMP stream source and sink plugin diff --git a/multimedia/gstreamer1-plugins-smoothstreaming/Makefile 
b/multimedia/gstreamer1-plugins-smoothstreaming/Makefile index 6a06f7fa033..3c6dcc42885 100644 --- a/multimedia/gstreamer1-plugins-smoothstreaming/Makefile +++ b/multimedia/gstreamer1-plugins-smoothstreaming/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer parse and demuliplex a Smooth Streaming manifest into audio/video streams plugin diff --git a/multimedia/gstreamer1-plugins-theora/Makefile b/multimedia/gstreamer1-plugins-theora/Makefile index 7ca7fcac6d4..5c40241942b 100644 --- a/multimedia/gstreamer1-plugins-theora/Makefile +++ b/multimedia/gstreamer1-plugins-theora/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer theora plugin diff --git a/multimedia/gstreamer1-plugins-ttml/Makefile b/multimedia/gstreamer1-plugins-ttml/Makefile index f41e42af29e..d261204045a 100644 --- a/multimedia/gstreamer1-plugins-ttml/Makefile +++ b/multimedia/gstreamer1-plugins-ttml/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer ttml subtitle plugin diff --git a/multimedia/gstreamer1-plugins-ugly/Makefile b/multimedia/gstreamer1-plugins-ugly/Makefile index cb496b2e934..c796d9e65a5 100644 --- a/multimedia/gstreamer1-plugins-ugly/Makefile +++ b/multimedia/gstreamer1-plugins-ugly/Makefile @@ -1,6 +1,6 @@ # Created by: Michael Johnson -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= multimedia COMMENT= GStreamer-plugins set of good-quality plug-ins that might have distribution problems diff --git a/multimedia/gstreamer1-plugins-v4l2/Makefile b/multimedia/gstreamer1-plugins-v4l2/Makefile index fca4b0b92f2..7c6a05694ba 100644 --- a/multimedia/gstreamer1-plugins-v4l2/Makefile +++ b/multimedia/gstreamer1-plugins-v4l2/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia PKGNAMESUFFIX= 1-plugins-v4l2 diff --git a/multimedia/gstreamer1-plugins-vpx/Makefile b/multimedia/gstreamer1-plugins-vpx/Makefile index 
58d39b1894b..0cb1c927298 100644 --- a/multimedia/gstreamer1-plugins-vpx/Makefile +++ b/multimedia/gstreamer1-plugins-vpx/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia COMMENT= GStreamer vp8 codec plugin diff --git a/multimedia/gstreamer1-plugins-x264/Makefile b/multimedia/gstreamer1-plugins-x264/Makefile index 11e81266684..c7c615f5a4e 100644 --- a/multimedia/gstreamer1-plugins-x264/Makefile +++ b/multimedia/gstreamer1-plugins-x264/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= multimedia COMMENT= GStreamer libx264 based H264 plugin diff --git a/multimedia/gstreamer1-plugins-x265/Makefile b/multimedia/gstreamer1-plugins-x265/Makefile index 617af5d501b..b2891e48f31 100644 --- a/multimedia/gstreamer1-plugins-x265/Makefile +++ b/multimedia/gstreamer1-plugins-x265/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia COMMENT= GStreamer libx265 based H265 plugin diff --git a/multimedia/gstreamer1-plugins/Makefile b/multimedia/gstreamer1-plugins/Makefile index b9de47d13ed..4eee2fbce2b 100644 --- a/multimedia/gstreamer1-plugins/Makefile +++ b/multimedia/gstreamer1-plugins/Makefile @@ -4,7 +4,7 @@ PORTNAME= gstreamer PORTVERSION?= ${BASE_PORTVERSION} # When chasing a shared library for a plug-in bump the PORTREVISION in the # plug-in port instead, like ${category}/gstreamer1-plugin-${PLUGIN}. 
-PORTREVISION?= 3 +PORTREVISION?= 4 CATEGORIES?= multimedia audio MASTER_SITES= GNOME/sources/gst-plugins-base/${PORTVERSION:R}:base \ GNOME/sources/gst-plugins-good/${PORTVERSION:R}:good \ diff --git a/multimedia/gstreamermm/Makefile b/multimedia/gstreamermm/Makefile index 55287279ea1..abed58cae16 100644 --- a/multimedia/gstreamermm/Makefile +++ b/multimedia/gstreamermm/Makefile @@ -3,6 +3,7 @@ PORTNAME= gstreamermm PORTVERSION= 1.10.0 +PORTREVISION= 1 CATEGORIES= multimedia MASTER_SITES= GNOME diff --git a/multimedia/gtk-youtube-viewer/Makefile b/multimedia/gtk-youtube-viewer/Makefile index 497bdcc97bb..e6560b220a8 100644 --- a/multimedia/gtk-youtube-viewer/Makefile +++ b/multimedia/gtk-youtube-viewer/Makefile @@ -1,7 +1,7 @@ # Created by: Rusmir Dusko PORTNAME= gtk-youtube-viewer -PORTVERSION= 3.9.7 +PORTVERSION= 3.9.8 CATEGORIES= multimedia perl5 MAINTAINER= nc@FreeBSD.org diff --git a/multimedia/gtk-youtube-viewer/distinfo b/multimedia/gtk-youtube-viewer/distinfo index 00e2a091496..e839a9ca304 100644 --- a/multimedia/gtk-youtube-viewer/distinfo +++ b/multimedia/gtk-youtube-viewer/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642100136 -SHA256 (trizen-youtube-viewer-3.9.7_GH0.tar.gz) = 0b7ccf18e7aa71b2b82214e3a5614e3036bc98d94951b9bd7aba8da952486257 -SIZE (trizen-youtube-viewer-3.9.7_GH0.tar.gz) = 260891 +TIMESTAMP = 1648411578 +SHA256 (trizen-youtube-viewer-3.9.8_GH0.tar.gz) = d002015b16808a322c09e52f82653f675335692b080f89589cb72850fc30dec1 +SIZE (trizen-youtube-viewer-3.9.8_GH0.tar.gz) = 261368 diff --git a/multimedia/handbrake/Makefile b/multimedia/handbrake/Makefile index ff69c08b591..46644f5a6a7 100644 --- a/multimedia/handbrake/Makefile +++ b/multimedia/handbrake/Makefile @@ -2,6 +2,7 @@ PORTNAME= handbrake DISTVERSION= 1.5.1 +PORTREVISION= 1 CATEGORIES= multimedia MASTER_SITES= https://github.com/HandBrake/HandBrake/releases/download/${DISTVERSION}/ MASTER_SITES+= https://github.com/HandBrake/HandBrake-contribs/releases/download/contribs/:contrib diff --git 
a/multimedia/imagination/Makefile b/multimedia/imagination/Makefile index 37e5f6d87ad..9b5dc0936d4 100644 --- a/multimedia/imagination/Makefile +++ b/multimedia/imagination/Makefile @@ -2,7 +2,7 @@ PORTNAME= imagination PORTVERSION= 3.0 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= multimedia MASTER_SITES= SF diff --git a/multimedia/kodi/Makefile b/multimedia/kodi/Makefile index 19297eda21a..db44c574ac3 100644 --- a/multimedia/kodi/Makefile +++ b/multimedia/kodi/Makefile @@ -1,7 +1,7 @@ PORTNAME= kodi DISTVERSION= 19.4 DISTVERSIONSUFFIX= -${KODI_CODENAME} -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia java MAINTAINER= yzrh@noema.org diff --git a/multimedia/libbluray/Makefile b/multimedia/libbluray/Makefile index ec4d5265c02..3bf5c3abc4d 100644 --- a/multimedia/libbluray/Makefile +++ b/multimedia/libbluray/Makefile @@ -2,6 +2,7 @@ PORTNAME= libbluray DISTVERSION= 1.3.0 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= multimedia MASTER_SITES= https://download.videolan.org/pub/videolan/${PORTNAME}/${DISTVERSION}/ diff --git a/multimedia/libmediaart/Makefile b/multimedia/libmediaart/Makefile index 2ddae65502c..85363b87c06 100644 --- a/multimedia/libmediaart/Makefile +++ b/multimedia/libmediaart/Makefile @@ -22,6 +22,7 @@ USE_LDCONFIG= yes MESON_ARGS= -Dimage_library=gdk-pixbuf OPTIONS_DEFINE= DOCS +OPTIONS_EXCLUDE=DOCS # https://gitlab.gnome.org/GNOME/libmediaart/-/issues/4 DOCS_BUILD_DEPENDS= gtk-doc>=0:textproc/gtk-doc DOCS_MESON_TRUE= gtk_doc #option('image_library', type: 'combo', choices: ['auto', 'gdk-pixbuf', 'qt4', 'qt5'], diff --git a/multimedia/libmediainfo/Makefile b/multimedia/libmediainfo/Makefile index c5c69a9d743..0c2d6c1365f 100644 --- a/multimedia/libmediainfo/Makefile +++ b/multimedia/libmediainfo/Makefile @@ -1,8 +1,8 @@ -# Created by: Sunpoet Po-Chuan Hsieh +# Created by: Po-Chuan Hsieh PORTNAME= libmediainfo PORTVERSION= 21.09 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia MASTER_SITES= 
https://mediaarea.net/download/binary/mediainfo/${PORTVERSION}/ \ LOCAL/sunpoet @@ -16,12 +16,6 @@ LICENSE_FILE= ${WRKSRC}/../../../License.html LIB_DEPENDS= libzen.so:multimedia/libzen -CONFLICTS_INSTALL= mediainfo-0.[0-6]* mediainfo-0.7.[0-5]* - -OPTIONS_DEFINE= CURL MMS TINYXML2 -OPTIONS_DEFAULT=TINYXML2 -TINYXML2_DESC= Use textproc/tinyxml2 instead of bundled one - USES= alias compiler:c++11-lang libtool localbase pathfix pkgconfig tar:xz CFLAGS+= -D_POSIX_PRIORITY_SCHEDULING @@ -33,6 +27,10 @@ USE_LDCONFIG= yes WRKSRC= ${WRKDIR}/MediaInfo_CLI_GNU_FromSource/MediaInfoLib/Project/GNU/Library +OPTIONS_DEFINE= CURL MMS TINYXML2 +OPTIONS_DEFAULT=TINYXML2 +TINYXML2_DESC= Use textproc/tinyxml2 instead of bundled one + CURL_CONFIGURE_ON= --with-libcurl=${LOCALBASE} CURL_LIB_DEPENDS= libcurl.so:ftp/curl MMS_CONFIGURE_ON= --with-libmms=${LOCALBASE} diff --git a/multimedia/libmediainfo/files/patch-Makefile.am b/multimedia/libmediainfo/files/patch-Makefile.am deleted file mode 100644 index c61eb054602..00000000000 --- a/multimedia/libmediainfo/files/patch-Makefile.am +++ /dev/null @@ -1,22 +0,0 @@ -Obtained from: https://github.com/MediaArea/MediaInfoLib/commit/e40d91326ab070f88d3e8d194268ce3cd9275028 - ---- Makefile.am.orig 2021-09-17 08:01:27 UTC -+++ Makefile.am -@@ -230,7 +230,8 @@ lib@MediaInfoLib_LibName@_la_SOURCES = \ - ../../../Source/MediaInfo/Video/File_Vp8.cpp \ - ../../../Source/MediaInfo/Video/File_Y4m.cpp \ - ../../../Source/MediaInfo/XmlUtils.cpp \ -- ../../../Source/MediaInfo/OutputHelpers.cpp -+ ../../../Source/MediaInfo/OutputHelpers.cpp \ -+ ../../../Source/ThirdParty/tfsxml/tfsxml.c - - @MediaInfoLib_LibName@includedir = $(includedir)/MediaInfo - @MediaInfoLib_LibName@include_HEADERS = \ -@@ -280,7 +281,6 @@ endif - - if COMPILE_TINYXML2 - lib@MediaInfoLib_LibName@_la_SOURCES += \ -- ../../../Source/ThirdParty/tfsxml/tfsxml.c \ - ../../../Source/ThirdParty/tinyxml2/tinyxml2.cpp - endif - diff --git a/multimedia/libmediainfo/files/patch-Makefile.in 
b/multimedia/libmediainfo/files/patch-Makefile.in new file mode 100644 index 00000000000..2727743b3aa --- /dev/null +++ b/multimedia/libmediainfo/files/patch-Makefile.in @@ -0,0 +1,42 @@ +Obtained from: https://github.com/MediaArea/MediaInfoLib/commit/e40d91326ab070f88d3e8d194268ce3cd9275028 (based on) + +--- Makefile.in.orig 2021-09-17 08:01:27 UTC ++++ Makefile.in +@@ -113,7 +113,6 @@ host_triplet = @host@ + @COMPILE_HMAC_TRUE@ ../../../Source/ThirdParty/hmac-gladman/hmac.c + + @COMPILE_TINYXML2_TRUE@am__append_7 = \ +-@COMPILE_TINYXML2_TRUE@ ../../../Source/ThirdParty/tfsxml/tfsxml.c \ + @COMPILE_TINYXML2_TRUE@ ../../../Source/ThirdParty/tinyxml2/tinyxml2.cpp + + subdir = . +@@ -416,8 +415,7 @@ am__dirstamp = $(am__leading_dot)dirstamp + @COMPILE_SHA1_TRUE@am__objects_4 = ../../../Source/ThirdParty/sha1-gladman/sha1.lo + @COMPILE_SHA2_TRUE@am__objects_5 = ../../../Source/ThirdParty/sha2-gladman/sha2.lo + @COMPILE_HMAC_TRUE@am__objects_6 = ../../../Source/ThirdParty/hmac-gladman/hmac.lo +-@COMPILE_TINYXML2_TRUE@am__objects_7 = ../../../Source/ThirdParty/tfsxml/tfsxml.lo \ +-@COMPILE_TINYXML2_TRUE@ ../../../Source/ThirdParty/tinyxml2/tinyxml2.lo ++@COMPILE_TINYXML2_TRUE@am__objects_7 = ../../../Source/ThirdParty/tinyxml2/tinyxml2.lo + am_lib@MediaInfoLib_LibName@_la_OBJECTS = \ + ../../../Source/MediaInfo/File__Analyze.lo \ + ../../../Source/MediaInfo/File__Analyze_Buffer.lo \ +@@ -647,7 +645,8 @@ am_lib@MediaInfoLib_LibName@_la_OBJECTS = \ + ../../../Source/MediaInfo/Video/File_Vp8.lo \ + ../../../Source/MediaInfo/Video/File_Y4m.lo \ + ../../../Source/MediaInfo/XmlUtils.lo \ +- ../../../Source/MediaInfo/OutputHelpers.lo $(am__objects_1) \ ++ ../../../Source/MediaInfo/OutputHelpers.lo \ ++ ../../../Source/ThirdParty/tfsxml/tfsxml.lo $(am__objects_1) \ + $(am__objects_2) $(am__objects_3) $(am__objects_4) \ + $(am__objects_5) $(am__objects_6) $(am__objects_7) + lib@MediaInfoLib_LibName@_la_OBJECTS = \ +@@ -1427,7 +1427,8 @@ lib@MediaInfoLib_LibName@_la_SOURCES = \ + 
../../../Source/MediaInfo/Video/File_Vp8.cpp \ + ../../../Source/MediaInfo/Video/File_Y4m.cpp \ + ../../../Source/MediaInfo/XmlUtils.cpp \ +- ../../../Source/MediaInfo/OutputHelpers.cpp $(am__append_1) \ ++ ../../../Source/MediaInfo/OutputHelpers.cpp \ ++ ../../../Source/ThirdParty/tfsxml/tfsxml.c $(am__append_1) \ + $(am__append_2) $(am__append_3) $(am__append_4) \ + $(am__append_5) $(am__append_6) $(am__append_7) + @MediaInfoLib_LibName@includedir = $(includedir)/MediaInfo diff --git a/multimedia/mlt6/Makefile b/multimedia/mlt6/Makefile index cfed953f813..d87402908df 100644 --- a/multimedia/mlt6/Makefile +++ b/multimedia/mlt6/Makefile @@ -1,6 +1,6 @@ PORTNAME= ${_MLT_PORTNAME} DISTVERSION= ${_MLT_VERSION} -PORTREVISION?= 4 +PORTREVISION?= 5 CATEGORIES?= multimedia # Don't use GitHub "fake" downloads that are auto-generated from a # tag; the project uploads tarballs for each release. diff --git a/multimedia/mlt7/Makefile b/multimedia/mlt7/Makefile index c9ab5816ccf..069c0899a0a 100644 --- a/multimedia/mlt7/Makefile +++ b/multimedia/mlt7/Makefile @@ -1,5 +1,6 @@ PORTNAME= ${_MLT_PORTNAME} DISTVERSION= ${_MLT_VERSION} +PORTREVISION= 1 CATEGORIES?= multimedia # Don't use GitHub "fake" downloads that are auto-generated from a # tag; the project uploads tarballs for each release. 
diff --git a/multimedia/mythtv-frontend/Makefile b/multimedia/mythtv-frontend/Makefile index a611f2b3305..29e437214c5 100644 --- a/multimedia/mythtv-frontend/Makefile +++ b/multimedia/mythtv-frontend/Makefile @@ -1,6 +1,6 @@ # Created by: Bernhard Froehlich -PORTREVISION= 1 +PORTREVISION= 2 PKGNAMESUFFIX= -${SLAVEPORT} MAINTAINER= ahicks@p-o.co.uk diff --git a/multimedia/mythtv/Makefile b/multimedia/mythtv/Makefile index 3cd15abf1e6..e10a889074a 100644 --- a/multimedia/mythtv/Makefile +++ b/multimedia/mythtv/Makefile @@ -3,7 +3,7 @@ PORTNAME= mythtv DISTVERSIONPREFIX= v DISTVERSION= 31.0 -PORTREVISION= 5 +PORTREVISION= 6 PORTEPOCH= 1 CATEGORIES= multimedia diff --git a/multimedia/ogmrip/Makefile b/multimedia/ogmrip/Makefile index 0fe2d2df9ad..e44d9a64856 100644 --- a/multimedia/ogmrip/Makefile +++ b/multimedia/ogmrip/Makefile @@ -2,7 +2,7 @@ PORTNAME= ogmrip PORTVERSION= 1.0.1 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= multimedia MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTVERSION:R}/${PORTVERSION} diff --git a/multimedia/phonon-gstreamer/Makefile b/multimedia/phonon-gstreamer/Makefile index 8cff17b26c6..79a691a48e5 100644 --- a/multimedia/phonon-gstreamer/Makefile +++ b/multimedia/phonon-gstreamer/Makefile @@ -2,7 +2,7 @@ PORTNAME= phonon DISTVERSION= 4.10.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= multimedia kde MASTER_SITES= KDE/stable/${PORTNAME}/${PORTNAME}-backend-${PHONON_PLUGIN}/${DISTVERSION} DISTNAME= ${PORTNAME}-backend-${PHONON_PLUGIN}-${DISTVERSION} diff --git a/multimedia/svt-av1/Makefile b/multimedia/svt-av1/Makefile index 4f9cc355ca8..0647c9a75b7 100644 --- a/multimedia/svt-av1/Makefile +++ b/multimedia/svt-av1/Makefile @@ -31,7 +31,7 @@ CMAKE_OFF= NATIVE OPTIONS_DEFINE= LTO OPTIONS_DEFAULT=LTO OPTIONS_EXCLUDE_powerpc64= ${"${/usr/bin/ld:L:tA}"==/usr/bin/ld.lld:?LTO:} # https://github.com/llvm/llvm-project/issues/46697 -OPTIONS_EXCLUDE_riscv64= LTO +OPTIONS_EXCLUDE_riscv64= LTO # bug 262871 LTO_CMAKE_BOOL= 
CMAKE_INTERPROCEDURAL_OPTIMIZATION LTO_CMAKE_ON= -DCMAKE_POLICY_DEFAULT_CMP0069:STRING=NEW diff --git a/multimedia/totem-pl-parser/Makefile b/multimedia/totem-pl-parser/Makefile index 2eb2bd03637..69cfbcdb891 100644 --- a/multimedia/totem-pl-parser/Makefile +++ b/multimedia/totem-pl-parser/Makefile @@ -2,6 +2,7 @@ PORTNAME= totem-pl-parser PORTVERSION= 3.26.5 +PORTREVISION= 1 CATEGORIES= multimedia gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/multimedia/totem/Makefile b/multimedia/totem/Makefile index 1e49f6ceaf3..e67c1e436ca 100644 --- a/multimedia/totem/Makefile +++ b/multimedia/totem/Makefile @@ -2,6 +2,7 @@ PORTNAME= totem PORTVERSION= 3.38.2 +PORTREVISION= 1 CATEGORIES= multimedia gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/multimedia/vcdimager/Makefile b/multimedia/vcdimager/Makefile index 02e78b8e997..0b6f49b29ef 100644 --- a/multimedia/vcdimager/Makefile +++ b/multimedia/vcdimager/Makefile @@ -2,7 +2,7 @@ PORTNAME= vcdimager PORTVERSION= 2.0.1 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= multimedia MASTER_SITES= GNU diff --git a/multimedia/vlc/Makefile b/multimedia/vlc/Makefile index e805e8a88f1..f67a443e1b2 100644 --- a/multimedia/vlc/Makefile +++ b/multimedia/vlc/Makefile @@ -2,6 +2,7 @@ PORTNAME= vlc DISTVERSION= 3.0.17.3 +PORTREVISION= 1 PORTEPOCH= 4 CATEGORIES= multimedia audio net www MASTER_SITES= http://download.videolan.org/pub/videolan/${PORTNAME}/${DISTVERSION:S/a$//}/ \ diff --git a/net-im/cawbird/Makefile b/net-im/cawbird/Makefile index 9ff14c7cf3f..e07e44f9f1f 100644 --- a/net-im/cawbird/Makefile +++ b/net-im/cawbird/Makefile @@ -1,7 +1,6 @@ PORTNAME= cawbird -PORTVERSION= 1.4.2 +PORTVERSION= 1.5 DISTVERSIONPREFIX= v -PORTREVISION= 1 CATEGORIES= net-im MAINTAINER= nc@FreeBSD.org diff --git a/net-im/cawbird/distinfo b/net-im/cawbird/distinfo index 05635e19b4e..b83bc2d32f6 100644 --- a/net-im/cawbird/distinfo +++ b/net-im/cawbird/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1636688445 -SHA256 
(IBBoard-cawbird-v1.4.2_GH0.tar.gz) = 4726e1c6c407d7bf4ee24fbf67a453174b7d01433277ce542354ac62e28857c0 -SIZE (IBBoard-cawbird-v1.4.2_GH0.tar.gz) = 653216 +TIMESTAMP = 1648412691 +SHA256 (IBBoard-cawbird-v1.5_GH0.tar.gz) = 0ef472cbecd6a7eb384a5b27833612d61eba7ace22293dbe14571b887b61ecae +SIZE (IBBoard-cawbird-v1.5_GH0.tar.gz) = 675527 diff --git a/net-im/dendrite/Makefile b/net-im/dendrite/Makefile index 45a4efccdda..330609630e8 100644 --- a/net-im/dendrite/Makefile +++ b/net-im/dendrite/Makefile @@ -1,6 +1,6 @@ PORTNAME= dendrite DISTVERSIONPREFIX= v -DISTVERSION= 0.6.5 +DISTVERSION= 0.7.0 CATEGORIES= net-im MAINTAINER= ashish@FreeBSD.org @@ -39,7 +39,6 @@ GH_TUPLE= Arceliar:ironwood:8951369625d0:arceliar_ironwood/vendor/github.com/Arc btcsuite:btcd:v0.20.1-beta:btcsuite_btcd/vendor/github.com/btcsuite/btcd \ census-instrumentation:opencensus-go:v0.22.4:census_instrumentation_opencensus_go/vendor/go.opencensus.io \ cespare:xxhash:v2.1.2:cespare_xxhash_v2/vendor/github.com/cespare/xxhash/v2 \ - cheekybits:genny:v1.0.0:cheekybits_genny/vendor/github.com/cheekybits/genny \ codeclysm:extract:v2.2.0:codeclysm_extract/vendor/github.com/codeclysm/extract \ containerd:containerd:v1.5.9:containerd_containerd/vendor/github.com/containerd/containerd \ davidlazar:go-crypto:dcfb0a7ac018:davidlazar_go_crypto/vendor/github.com/davidlazar/go-crypto \ @@ -48,11 +47,8 @@ GH_TUPLE= Arceliar:ironwood:8951369625d0:arceliar_ironwood/vendor/github.com/Arc docker:go-units:v0.4.0:docker_go_units/vendor/github.com/docker/go-units \ flynn:noise:2492fe189ae6:flynn_noise/vendor/github.com/flynn/noise \ frankban:quicktest:v1.14.0:frankban_quicktest/vendor/github.com/frankban/quicktest \ - fsnotify:fsnotify:v1.4.9:fsnotify_fsnotify/vendor/github.com/fsnotify/fsnotify \ getsentry:sentry-go:v0.12.0:getsentry_sentry_go/vendor/github.com/getsentry/sentry-go \ go-macaroon:macaroon:v2.1.0:go_macaroon_macaroon/vendor/gopkg.in/macaroon.v2 \ - 
go-task:slim-sprig:348f09dbbbc0:go_task_slim_sprig/vendor/github.com/go-task/slim-sprig \ - go-tomb:tomb:dd632973f1e7:go_tomb_tomb/vendor/gopkg.in/tomb.v1 \ go-yaml:yaml:496545a6307b:go_yaml_yaml_1/vendor/gopkg.in/yaml.v3 \ go-yaml:yaml:v2.4.0:go_yaml_yaml/vendor/gopkg.in/yaml.v2 \ gogo:protobuf:v1.3.2:gogo_protobuf/vendor/github.com/gogo/protobuf \ @@ -64,7 +60,7 @@ GH_TUPLE= Arceliar:ironwood:8951369625d0:arceliar_ironwood/vendor/github.com/Arc golang:protobuf:v1.5.2:golang_protobuf/vendor/github.com/golang/protobuf \ golang:sys:4e6760a101f9:golang_sys/vendor/golang.org/x/sys \ golang:term:03fcf44c2211:golang_term/vendor/golang.org/x/term \ - golang:text:v0.3.7:golang_text/vendor/golang.org/x/text \ + golang:text:5bd84dd9b33b:golang_text/vendor/golang.org/x/text \ golang:time:f0f3c7e86c11:golang_time/vendor/golang.org/x/time \ golang:tools:316ba0b74098:golang_tools/vendor/golang.org/x/tools \ golang:xerrors:5ec99f83aff1:golang_xerrors/vendor/golang.org/x/xerrors \ @@ -93,7 +89,7 @@ GH_TUPLE= Arceliar:ironwood:8951369625d0:arceliar_ironwood/vendor/github.com/Arc jbenet:goprocess:v0.1.4:jbenet_goprocess/vendor/github.com/jbenet/goprocess \ juju:errors:3fe23663418f:juju_errors/vendor/github.com/juju/errors \ juju:testing:77eb13d6cad2:juju_testing/vendor/github.com/juju/testing \ - klauspost:compress:v1.13.4:klauspost_compress/vendor/github.com/klauspost/compress \ + klauspost:compress:v1.14.4:klauspost_compress/vendor/github.com/klauspost/compress \ koron:go-ssdp:2e1c40ed0b5d:koron_go_ssdp/vendor/github.com/koron/go-ssdp \ lib:pq:v1.10.4:lib_pq/vendor/github.com/lib/pq \ libp2p:go-addr-util:v0.0.2:libp2p_go_addr_util/vendor/github.com/libp2p/go-addr-util \ @@ -137,22 +133,18 @@ GH_TUPLE= Arceliar:ironwood:8951369625d0:arceliar_ironwood/vendor/github.com/Arc libp2p:go-tcp-transport:v0.2.1:libp2p_go_tcp_transport/vendor/github.com/libp2p/go-tcp-transport \ libp2p:go-ws-transport:v0.4.0:libp2p_go_ws_transport/vendor/github.com/libp2p/go-ws-transport \ 
libp2p:go-yamux:v2.0.0:libp2p_go_yamux_v2/vendor/github.com/libp2p/go-yamux/v2 \ - lucas-clemente:quic-go:v0.22.0:lucas_clemente_quic_go/vendor/github.com/lucas-clemente/quic-go \ - marten-seemann:qtls-go1-15:v0.1.5:marten_seemann_qtls_go1_15/vendor/github.com/marten-seemann/qtls-go1-15 \ - marten-seemann:qtls-go1-16:v0.1.4:marten_seemann_qtls_go1_16/vendor/github.com/marten-seemann/qtls-go1-16 \ - marten-seemann:qtls-go1-17:v0.1.0:marten_seemann_qtls_go1_17/vendor/github.com/marten-seemann/qtls-go1-17 \ matrix-org:dugong:66e6b1c67e2e:matrix_org_dugong/vendor/github.com/matrix-org/dugong \ matrix-org:go-http-js-libp2p:783164aeeda4:matrix_org_go_http_js_libp2p/vendor/github.com/matrix-org/go-http-js-libp2p \ matrix-org:go-sqlite3-js:b0d1ba599a6d:matrix_org_go_sqlite3_js/vendor/github.com/matrix-org/go-sqlite3-js \ matrix-org:gomatrix:be2af5ef2e16:matrix_org_gomatrix/vendor/github.com/matrix-org/gomatrix \ - matrix-org:gomatrixserverlib:e124bd7d7902:matrix_org_gomatrixserverlib/vendor/github.com/matrix-org/gomatrixserverlib \ - matrix-org:pinecone:0f0afd1a46aa:matrix_org_pinecone/vendor/github.com/matrix-org/pinecone \ + matrix-org:gomatrixserverlib:0980b7f341e0:matrix_org_gomatrixserverlib/vendor/github.com/matrix-org/gomatrixserverlib \ + matrix-org:pinecone:6fb077377278:matrix_org_pinecone/vendor/github.com/matrix-org/pinecone \ matrix-org:util:55161520e1d4:matrix_org_util/vendor/github.com/matrix-org/util \ mattn:go-sqlite3:v1.14.10:mattn_go_sqlite3/vendor/github.com/mattn/go-sqlite3 \ matttproud:golang_protobuf_extensions:c182affec369:matttproud_golang_protobuf_extensions/vendor/github.com/matttproud/golang_protobuf_extensions \ miekg:dns:v1.1.31:miekg_dns/vendor/github.com/miekg/dns \ minio:blake2b-simd:3f5f724cb5b1:minio_blake2b_simd/vendor/github.com/minio/blake2b-simd \ - minio:highwayhash:v1.0.1:minio_highwayhash/vendor/github.com/minio/highwayhash \ + minio:highwayhash:v1.0.2:minio_highwayhash/vendor/github.com/minio/highwayhash \ 
minio:sha256-simd:v0.1.1:minio_sha256_simd/vendor/github.com/minio/sha256-simd \ moby:moby:v20.10.12:moby_moby/vendor/github.com/docker/docker \ morikuni:aec:v1.0.0:morikuni_aec/vendor/github.com/morikuni/aec \ @@ -170,14 +162,14 @@ GH_TUPLE= Arceliar:ironwood:8951369625d0:arceliar_ironwood/vendor/github.com/Arc nats-io:jwt:58e87895b296:nats_io_jwt_v2/vendor/github.com/nats-io/jwt \ nats-io:nkeys:v0.3.0:nats_io_nkeys/vendor/github.com/nats-io/nkeys \ nats-io:nuid:v1.0.1:nats_io_nuid/vendor/github.com/nats-io/nuid \ - neilalexander:nats-server:087330ed46ad:neilalexander_nats_server_v2/vendor/github.com/nats-io/nats-server/v2 \ + neilalexander:nats-server:e2e4a244f30e:neilalexander_nats_server_v2/vendor/github.com/nats-io/nats-server/v2 \ neilalexander:nats.go:f4ddebe1061c:neilalexander_nats_go/vendor/github.com/nats-io/nats.go \ neilalexander:utp:54ae7b1cd5f9:neilalexander_utp/vendor/github.com/neilalexander/utp \ nfnt:resize:83c6a9932646:nfnt_resize/vendor/github.com/nfnt/resize \ ngrok:sqlmw:9d16fdc47b31:ngrok_sqlmw/vendor/github.com/ngrok/sqlmw \ nhooyr:websocket:v1.8.7:nhooyr_websocket/vendor/nhooyr.io/websocket \ - nxadm:tail:v1.4.8:nxadm_tail/vendor/github.com/nxadm/tail \ onsi:ginkgo:v1.16.4:onsi_ginkgo/vendor/github.com/onsi/ginkgo \ + onsi:gomega:v1.13.0:onsi_gomega/vendor/github.com/onsi/gomega \ opencontainers:go-digest:v1.0.0:opencontainers_go_digest/vendor/github.com/opencontainers/go-digest \ opencontainers:image-spec:v1.0.2:opencontainers_image_spec/vendor/github.com/opencontainers/image-spec \ opentracing:opentracing-go:v1.2.0:opentracing_opentracing_go/vendor/github.com/opentracing/opentracing-go \ @@ -205,7 +197,7 @@ GH_TUPLE= Arceliar:ironwood:8951369625d0:arceliar_ironwood/vendor/github.com/Arc whyrusleeping:mdns:b9b60ed33aa9:whyrusleeping_mdns/vendor/github.com/whyrusleeping/mdns \ whyrusleeping:multiaddr-filter:e903e4adabd7:whyrusleeping_multiaddr_filter/vendor/github.com/whyrusleeping/multiaddr-filter \ 
whyrusleeping:timecache:cfcb2f1abfee:whyrusleeping_timecache/vendor/github.com/whyrusleeping/timecache \ - yggdrasil-network:yggdrasil-go:v0.4.2:yggdrasil_network_yggdrasil_go/vendor/github.com/yggdrasil-network/yggdrasil-go + yggdrasil-network:yggdrasil-go:v0.4.3:yggdrasil_network_yggdrasil_go/vendor/github.com/yggdrasil-network/yggdrasil-go post-build: cd ${WRKSRC} && \ diff --git a/net-im/dendrite/distinfo b/net-im/dendrite/distinfo index ec8dc1488d8..b3055fb9dfa 100644 --- a/net-im/dendrite/distinfo +++ b/net-im/dendrite/distinfo @@ -1,6 +1,6 @@ -TIMESTAMP = 1646414224 -SHA256 (matrix-org-dendrite-v0.6.5_GH0.tar.gz) = b74170bd3f81e2f22ff4673bc632e37afeef1121fa90acb03ed2eed17a387133 -SIZE (matrix-org-dendrite-v0.6.5_GH0.tar.gz) = 1093602 +TIMESTAMP = 1648220759 +SHA256 (matrix-org-dendrite-v0.7.0_GH0.tar.gz) = 26c378bff1738b0e8422c7bd425be4763f22225f422ae1854c78644e4ca8bc42 +SIZE (matrix-org-dendrite-v0.7.0_GH0.tar.gz) = 1098226 SHA256 (Arceliar-ironwood-8951369625d0_GH0.tar.gz) = 17385698026bd8afb569859055604835f061f1cef77eda2372b2a0c7c4699939 SIZE (Arceliar-ironwood-8951369625d0_GH0.tar.gz) = 40531 SHA256 (Arceliar-phony-dde1a8dca979_GH0.tar.gz) = bcc79621c9da979c1d357a29f1924d73c86c57c0e901652312d742cbc39e5962 @@ -23,8 +23,6 @@ SHA256 (census-instrumentation-opencensus-go-v0.22.4_GH0.tar.gz) = f230d965b9ddf SIZE (census-instrumentation-opencensus-go-v0.22.4_GH0.tar.gz) = 170499 SHA256 (cespare-xxhash-v2.1.2_GH0.tar.gz) = 471399cfcf0b1bd188add8d34435b91e7011cedbfe76e29687803a9d74320352 SIZE (cespare-xxhash-v2.1.2_GH0.tar.gz) = 11244 -SHA256 (cheekybits-genny-v1.0.0_GH0.tar.gz) = 6982bf513333fb3ee3e6e0633500a3800fb6a3d6beb9e6c6084a96c85a49dd73 -SIZE (cheekybits-genny-v1.0.0_GH0.tar.gz) = 15585 SHA256 (codeclysm-extract-v2.2.0_GH0.tar.gz) = 79b585de51af0bfd715714cc111794ad1afaaae79f3543e66f7cad115683f998 SIZE (codeclysm-extract-v2.2.0_GH0.tar.gz) = 73694 SHA256 (containerd-containerd-v1.5.9_GH0.tar.gz) = 
40c9767af3e87f2c36adf2f563f0a8374e80b30bd2b7aa80058c85912406cef4 @@ -41,16 +39,10 @@ SHA256 (flynn-noise-2492fe189ae6_GH0.tar.gz) = 6a75fc9f88987d76f79226e6728c7f8d7 SIZE (flynn-noise-2492fe189ae6_GH0.tar.gz) = 208627 SHA256 (frankban-quicktest-v1.14.0_GH0.tar.gz) = ae473055eb7a97ea8d735fc94c3aab25966267bf69e31adebda586d499ac286f SIZE (frankban-quicktest-v1.14.0_GH0.tar.gz) = 39368 -SHA256 (fsnotify-fsnotify-v1.4.9_GH0.tar.gz) = 4f888b1cb132026227826751d156c0a2958e7d492e5e38386cde8848ef494dcb -SIZE (fsnotify-fsnotify-v1.4.9_GH0.tar.gz) = 31900 SHA256 (getsentry-sentry-go-v0.12.0_GH0.tar.gz) = 4dc91e9269815fb3ffcc5bb56a989728f0a67065b5943056a8e309ba5a8840d7 SIZE (getsentry-sentry-go-v0.12.0_GH0.tar.gz) = 120059 SHA256 (go-macaroon-macaroon-v2.1.0_GH0.tar.gz) = cb7f677a7ba287ce24c3523b4b3e121c0d64096fa72cc9adeaf813b1559ecc40 SIZE (go-macaroon-macaroon-v2.1.0_GH0.tar.gz) = 25328 -SHA256 (go-task-slim-sprig-348f09dbbbc0_GH0.tar.gz) = c52607aad259efae7f725eadf0493933c206d36b776df117f94f6b2eeb4fd8b1 -SIZE (go-task-slim-sprig-348f09dbbbc0_GH0.tar.gz) = 40034 -SHA256 (go-tomb-tomb-dd632973f1e7_GH0.tar.gz) = b67f4ee9324a78176bc3196fe262388696aeb3f31d9879d498200219f9c4c554 -SIZE (go-tomb-tomb-dd632973f1e7_GH0.tar.gz) = 3631 SHA256 (go-yaml-yaml-496545a6307b_GH0.tar.gz) = ed0e11dc14bbbd4127031d7e8b9e58dad885e2c44a16359d2f64b71d1d1f692a SIZE (go-yaml-yaml-496545a6307b_GH0.tar.gz) = 90156 SHA256 (go-yaml-yaml-v2.4.0_GH0.tar.gz) = d8e94679e5fff6bd1a35e10241543929a5f3da44f701755babf99b3daf0faac0 @@ -73,8 +65,8 @@ SHA256 (golang-sys-4e6760a101f9_GH0.tar.gz) = 03a0fe05c767ee371318e94f05d0fb59ce SIZE (golang-sys-4e6760a101f9_GH0.tar.gz) = 1259262 SHA256 (golang-term-03fcf44c2211_GH0.tar.gz) = 1919967c8fbf15de9261a2315d90072357524ee557d7df106b426def02a8e5f3 SIZE (golang-term-03fcf44c2211_GH0.tar.gz) = 14972 -SHA256 (golang-text-v0.3.7_GH0.tar.gz) = 7cab2f6c3133ac1d422edd952b0dd2082fa55a73c2663fb2defd9bf83d649b26 -SIZE (golang-text-v0.3.7_GH0.tar.gz) = 8354718 +SHA256 
(golang-text-5bd84dd9b33b_GH0.tar.gz) = cda300bc590c52f2d5a98eecb4a1dbb9f529d62fc21c88445881ac2ae0113d2c +SIZE (golang-text-5bd84dd9b33b_GH0.tar.gz) = 8363403 SHA256 (golang-time-f0f3c7e86c11_GH0.tar.gz) = 421890dd6e4b7e8d5ca68cc163c8b3b08436898664534aedfefd8602e11a4e53 SIZE (golang-time-f0f3c7e86c11_GH0.tar.gz) = 9626 SHA256 (golang-tools-316ba0b74098_GH0.tar.gz) = 77a58b4c8a44e0b460142d33320af948379fc158a786c18a922afee023443b42 @@ -131,8 +123,8 @@ SHA256 (juju-errors-3fe23663418f_GH0.tar.gz) = 38de60613728d6728f5b6f322369b9c07 SIZE (juju-errors-3fe23663418f_GH0.tar.gz) = 17606 SHA256 (juju-testing-77eb13d6cad2_GH0.tar.gz) = 3c941e474ff73bcf1dd74f05bca7b7c9c7b857d2e1307746ed5c54f17861eee4 SIZE (juju-testing-77eb13d6cad2_GH0.tar.gz) = 69731 -SHA256 (klauspost-compress-v1.13.4_GH0.tar.gz) = e25c9621fd3306e2e2939c8aaf79f74929a6c415e75a34e29536cac2d3ec57ab -SIZE (klauspost-compress-v1.13.4_GH0.tar.gz) = 15446158 +SHA256 (klauspost-compress-v1.14.4_GH0.tar.gz) = 30480619257f7d40fcbfd61b59c34fd36e85220ac2eee400c493b06c37f53740 +SIZE (klauspost-compress-v1.14.4_GH0.tar.gz) = 15502117 SHA256 (koron-go-ssdp-2e1c40ed0b5d_GH0.tar.gz) = b8d09aa1efb96060b35395e3935a4ec16e5a032c5092ecd4b6a1761dee91ca97 SIZE (koron-go-ssdp-2e1c40ed0b5d_GH0.tar.gz) = 8735 SHA256 (lib-pq-v1.10.4_GH0.tar.gz) = 3d56bb8c6b50d86b6d053053a7da831a07e9da39205ffa23dfbf1487234ab742 @@ -219,14 +211,6 @@ SHA256 (libp2p-go-ws-transport-v0.4.0_GH0.tar.gz) = 235870ed1e72456299f2527fd131 SIZE (libp2p-go-ws-transport-v0.4.0_GH0.tar.gz) = 28140 SHA256 (libp2p-go-yamux-v2.0.0_GH0.tar.gz) = 4dedc0a8385a421a3d0aa9928c624104916489f4a008a39be2c614b03cc903f0 SIZE (libp2p-go-yamux-v2.0.0_GH0.tar.gz) = 29799 -SHA256 (lucas-clemente-quic-go-v0.22.0_GH0.tar.gz) = 39fef6471a03d66495df3aabb1ede5258853798ea62caa7863b5d7e24cf6cf13 -SIZE (lucas-clemente-quic-go-v0.22.0_GH0.tar.gz) = 519237 -SHA256 (marten-seemann-qtls-go1-15-v0.1.5_GH0.tar.gz) = 48739062bf17180f55a6e49d9c6a90ba7611a4a492327b8a69a2a73459bc73dd -SIZE 
(marten-seemann-qtls-go1-15-v0.1.5_GH0.tar.gz) = 413602 -SHA256 (marten-seemann-qtls-go1-16-v0.1.4_GH0.tar.gz) = e6166cfc140acb6cfc11526444640e31ed47cf8b9c31f5812904a7735ecd8aa6 -SIZE (marten-seemann-qtls-go1-16-v0.1.4_GH0.tar.gz) = 415515 -SHA256 (marten-seemann-qtls-go1-17-v0.1.0_GH0.tar.gz) = e1c22bac3a614f31b0ca5d32c2a5aeee9b5032f3b23b8951810c21e990ed3997 -SIZE (marten-seemann-qtls-go1-17-v0.1.0_GH0.tar.gz) = 421611 SHA256 (matrix-org-dugong-66e6b1c67e2e_GH0.tar.gz) = b6ebcaad5c8f82d6cb17e0e7442bc3adaa5c09df8aa9679311cf6b015193fb13 SIZE (matrix-org-dugong-66e6b1c67e2e_GH0.tar.gz) = 8736 SHA256 (matrix-org-go-http-js-libp2p-783164aeeda4_GH0.tar.gz) = 9ebf76b055a8d3583bf803f57c4ac9291e2e3ba0bc138484669d5ebda16c60ef @@ -235,10 +219,10 @@ SHA256 (matrix-org-go-sqlite3-js-b0d1ba599a6d_GH0.tar.gz) = 620614ff4e66d3791bcb SIZE (matrix-org-go-sqlite3-js-b0d1ba599a6d_GH0.tar.gz) = 16622 SHA256 (matrix-org-gomatrix-be2af5ef2e16_GH0.tar.gz) = f1ff1f93dab387847a714636182392cbacd19f13f81a25295262d8a5f79766ef SIZE (matrix-org-gomatrix-be2af5ef2e16_GH0.tar.gz) = 26922 -SHA256 (matrix-org-gomatrixserverlib-e124bd7d7902_GH0.tar.gz) = 707dd31ee1c32d75d2c701d3d38e4db9b2a9dd8a46f6885d90028fbbfd527bfc -SIZE (matrix-org-gomatrixserverlib-e124bd7d7902_GH0.tar.gz) = 130167 -SHA256 (matrix-org-pinecone-0f0afd1a46aa_GH0.tar.gz) = 4e8e420e99f68f13b748e4faab486d942f1def6fb615e950322e4f7dfece18ed -SIZE (matrix-org-pinecone-0f0afd1a46aa_GH0.tar.gz) = 289351 +SHA256 (matrix-org-gomatrixserverlib-0980b7f341e0_GH0.tar.gz) = f338ec5fb46ba74f8bd18a993f909292a353a99b6366fd8db14a6dbb03dcbc31 +SIZE (matrix-org-gomatrixserverlib-0980b7f341e0_GH0.tar.gz) = 130714 +SHA256 (matrix-org-pinecone-6fb077377278_GH0.tar.gz) = ae33f498df1cdc9637e6270d1cf4545cbe48a091198f65f9f6af55a494782ae2 +SIZE (matrix-org-pinecone-6fb077377278_GH0.tar.gz) = 290661 SHA256 (matrix-org-util-55161520e1d4_GH0.tar.gz) = 4ad3e66b9993f39c82eac1fca7f6549472c8aaa61227914209678cd3d7d6a3a6 SIZE (matrix-org-util-55161520e1d4_GH0.tar.gz) 
= 11499 SHA256 (mattn-go-sqlite3-v1.14.10_GH0.tar.gz) = 57bbbd9acc6d7e7fed03068092f37b2f4546d3dabb2e31c866bdcf13c55d81bd @@ -249,8 +233,8 @@ SHA256 (miekg-dns-v1.1.31_GH0.tar.gz) = f40eb494b6b14ba8796a3d561861bb7510031f29 SIZE (miekg-dns-v1.1.31_GH0.tar.gz) = 188593 SHA256 (minio-blake2b-simd-3f5f724cb5b1_GH0.tar.gz) = 615fb58e96e1814b2df621a86dae7d271de47b6192608e85f11f55b830460d01 SIZE (minio-blake2b-simd-3f5f724cb5b1_GH0.tar.gz) = 69929 -SHA256 (minio-highwayhash-v1.0.1_GH0.tar.gz) = aa1fd01cd0c16d362e8e8a522f1bed6ccff76fb55107e6f5478b1b7bd329e951 -SIZE (minio-highwayhash-v1.0.1_GH0.tar.gz) = 22967 +SHA256 (minio-highwayhash-v1.0.2_GH0.tar.gz) = 5a89e6f3e77cb77c818c5ce0316812d544e588456fce2e55e02f48d52fa9c5d2 +SIZE (minio-highwayhash-v1.0.2_GH0.tar.gz) = 23056 SHA256 (minio-sha256-simd-v0.1.1_GH0.tar.gz) = bc1f1e80dc1291f6d26933658155488cc895f6b8a4944479d462cf5a79112109 SIZE (minio-sha256-simd-v0.1.1_GH0.tar.gz) = 65024 SHA256 (moby-moby-v20.10.12_GH0.tar.gz) = a8ee80d31c7b74f687a837cd2a8570578f118179fba0844c5ee88f90fe180155 @@ -285,8 +269,8 @@ SHA256 (nats-io-nkeys-v0.3.0_GH0.tar.gz) = ea281df5dc380b84f61c3712b367f02957cff SIZE (nats-io-nkeys-v0.3.0_GH0.tar.gz) = 19986 SHA256 (nats-io-nuid-v1.0.1_GH0.tar.gz) = a0b4fe5b40781add2a9fdb5d723313be5f5d11c1a79ea1dd2671278826ef078d SIZE (nats-io-nuid-v1.0.1_GH0.tar.gz) = 7529 -SHA256 (neilalexander-nats-server-087330ed46ad_GH0.tar.gz) = 79ce2a885002074cdc6eae7fdc5b8322d8e312b05139b88dced7aac872b2bdc9 -SIZE (neilalexander-nats-server-087330ed46ad_GH0.tar.gz) = 1383115 +SHA256 (neilalexander-nats-server-e2e4a244f30e_GH0.tar.gz) = b599d7b16e352ed0c4c2934cab13d882c86d6234974e61fa0695b3693d629302 +SIZE (neilalexander-nats-server-e2e4a244f30e_GH0.tar.gz) = 1398309 SHA256 (neilalexander-nats.go-f4ddebe1061c_GH0.tar.gz) = aa60dc9a21bfbf45d632e441dae48bd56b0282fdfe0f6877bfa1066d43584333 SIZE (neilalexander-nats.go-f4ddebe1061c_GH0.tar.gz) = 240693 SHA256 (neilalexander-utp-54ae7b1cd5f9_GH0.tar.gz) = 
a7203304a6a957313f76b13005cc38fb5e97d327edaba7ea236a6cd91a29cd1a @@ -297,10 +281,10 @@ SHA256 (ngrok-sqlmw-9d16fdc47b31_GH0.tar.gz) = ba1ea086c1511bab2d68bbb1edf52085e SIZE (ngrok-sqlmw-9d16fdc47b31_GH0.tar.gz) = 14152 SHA256 (nhooyr-websocket-v1.8.7_GH0.tar.gz) = b2417df7fe0e6068b751fd3ede8d5f2b86658cf34c52438827ac1802f34a4e82 SIZE (nhooyr-websocket-v1.8.7_GH0.tar.gz) = 51286 -SHA256 (nxadm-tail-v1.4.8_GH0.tar.gz) = 8208362046819275a0809000dceacbd7b2a7caa07bcd6547dd2ff9b2104fa56c -SIZE (nxadm-tail-v1.4.8_GH0.tar.gz) = 1255770 SHA256 (onsi-ginkgo-v1.16.4_GH0.tar.gz) = f4b9e08e035f5521a4572ce7bb534a3ba0d3c90b0a2d1ae84b9359d59b6ca486 SIZE (onsi-ginkgo-v1.16.4_GH0.tar.gz) = 164036 +SHA256 (onsi-gomega-v1.13.0_GH0.tar.gz) = 4bd0dd71e94ce069628ea705f06701460cf057b026b2b341ee44ab1b30ef9f59 +SIZE (onsi-gomega-v1.13.0_GH0.tar.gz) = 127839 SHA256 (opencontainers-go-digest-v1.0.0_GH0.tar.gz) = 1e74706d265c92f62793af741e322163f3c08afa66f5a7926c9b9ccb44fed230 SIZE (opencontainers-go-digest-v1.0.0_GH0.tar.gz) = 24456 SHA256 (opencontainers-image-spec-v1.0.2_GH0.tar.gz) = 432211c0a0c6663b48434f0a5655404b7ccbff3aa81c60c8649d591880b360c0 @@ -355,5 +339,5 @@ SHA256 (whyrusleeping-multiaddr-filter-e903e4adabd7_GH0.tar.gz) = ac4f174ddaa4d7 SIZE (whyrusleeping-multiaddr-filter-e903e4adabd7_GH0.tar.gz) = 2696 SHA256 (whyrusleeping-timecache-cfcb2f1abfee_GH0.tar.gz) = 87632e428950f41ee6f61b267e1238e811f69c177de4c6ff0a27003a1227c2a4 SIZE (whyrusleeping-timecache-cfcb2f1abfee_GH0.tar.gz) = 1977 -SHA256 (yggdrasil-network-yggdrasil-go-v0.4.2_GH0.tar.gz) = ac21671e2ed0bf5313d277dead703190c6bdccfe8bbb4ccb4ad8d1258ff79689 -SIZE (yggdrasil-network-yggdrasil-go-v0.4.2_GH0.tar.gz) = 91257 +SHA256 (yggdrasil-network-yggdrasil-go-v0.4.3_GH0.tar.gz) = db793089eddfef628d30055d8b06a6a09f98bb38c3b4ede5265a4ad6eaa80d52 +SIZE (yggdrasil-network-yggdrasil-go-v0.4.3_GH0.tar.gz) = 96364 diff --git a/net-im/folks/Makefile b/net-im/folks/Makefile index 9971fc310f0..069c4206019 100644 --- 
a/net-im/folks/Makefile +++ b/net-im/folks/Makefile @@ -2,7 +2,7 @@ PORTNAME= folks PORTVERSION= 0.15.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= net-im MASTER_SITES= GNOME diff --git a/net-im/fractal/Makefile b/net-im/fractal/Makefile index b2b78cc40a1..d44cbb3ea8d 100644 --- a/net-im/fractal/Makefile +++ b/net-im/fractal/Makefile @@ -1,6 +1,6 @@ PORTNAME= fractal DISTVERSION= 4.4.0 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= net-im MASTER_SITES= https://gitlab.gnome.org/World/fractal/uploads/${GL_HASH}/ diff --git a/net-im/kopete/Makefile b/net-im/kopete/Makefile index 841eed8faab..5f1b4f97d7e 100644 --- a/net-im/kopete/Makefile +++ b/net-im/kopete/Makefile @@ -1,5 +1,6 @@ PORTNAME= kopete PORTVERSION= ${KDE_APPLICATIONS_VERSION} +PORTREVISION= 1 CATEGORIES= net-im kde kde-applications MAINTAINER= kde@FreeBSD.org diff --git a/net-im/libaccounts-glib/Makefile b/net-im/libaccounts-glib/Makefile index 0577a92592a..479c6a5fad5 100644 --- a/net-im/libaccounts-glib/Makefile +++ b/net-im/libaccounts-glib/Makefile @@ -1,5 +1,6 @@ PORTNAME= libaccounts-glib DISTVERSION= 1.25 +PORTREVISION= 1 CATEGORIES= net-im DIST_SUBDIR= KDE/${GL_ACCOUNT} diff --git a/net-im/libpurple/Makefile b/net-im/libpurple/Makefile index 6b840dda572..fb455e64ef3 100644 --- a/net-im/libpurple/Makefile +++ b/net-im/libpurple/Makefile @@ -2,7 +2,7 @@ PORTNAME?= libpurple PORTVERSION= 2.14.8 -PORTREVISION?= 1 +PORTREVISION?= 2 CATEGORIES?= net-im MASTER_SITES= SF/pidgin/Pidgin/${PORTVERSION} DISTNAME= pidgin-${PORTVERSION} diff --git a/net-im/pidgin-sipe/Makefile b/net-im/pidgin-sipe/Makefile index 5b3a76d075d..536ea75e6eb 100644 --- a/net-im/pidgin-sipe/Makefile +++ b/net-im/pidgin-sipe/Makefile @@ -2,6 +2,7 @@ PORTNAME= pidgin-sipe PORTVERSION= 1.24.0 +PORTREVISION= 1 CATEGORIES= net-im MASTER_SITES= SF/sipe/sipe/${DISTNAME} diff --git a/net-im/pidgin-twitter/Makefile b/net-im/pidgin-twitter/Makefile index da568f7035f..893170e67b6 100644 --- a/net-im/pidgin-twitter/Makefile +++ 
b/net-im/pidgin-twitter/Makefile @@ -2,7 +2,7 @@ PORTNAME= pidgin-twitter PORTVERSION= 0.9.2 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= net-im MASTER_SITES= http://www.honeyplanet.jp/ diff --git a/net-im/py-matrix-synapse/Makefile b/net-im/py-matrix-synapse/Makefile index 12b3aaac05a..12cf37a11a4 100644 --- a/net-im/py-matrix-synapse/Makefile +++ b/net-im/py-matrix-synapse/Makefile @@ -2,7 +2,7 @@ PORTNAME= matrix-synapse DISTVERSIONPREFIX= v -DISTVERSION= 1.53.0 +DISTVERSION= 1.55.2 CATEGORIES= net-im python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -37,11 +37,12 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}jsonschema>=3.0.0:devel/py-jsonschema@${PY_F ${PYTHON_PKGNAMEPREFIX}prometheus-client>=0.4.0:net-mgmt/py-prometheus-client@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}attrs>19.2.0:devel/py-attrs@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}netaddr>=0.7.18:net/py-netaddr@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}Jinja2>=2.9:devel/py-Jinja2@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}Jinja2<3.1.0:devel/py-Jinja2@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}bleach>=1.4.3:www/py-bleach@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}typing-extensions>=3.7.4:devel/py-typing-extensions@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}ijson>=3.1:devel/py-ijson@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}typing-extensions>=3.10.0:devel/py-typing-extensions@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}ijson>=3.1.4:devel/py-ijson@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pyjwt>=1.7.0:www/py-pyjwt@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}packaging>=16.1:devel/py-packaging@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}matrix-common>=1.1.0:devel/py-matrix-common@${PY_FLAVOR} \ ${LOCALBASE}/share/certs/ca-root-nss.crt:security/ca_root_nss diff --git a/net-im/py-matrix-synapse/distinfo b/net-im/py-matrix-synapse/distinfo index c8613b3ff46..4892f2cafbf 100644 --- a/net-im/py-matrix-synapse/distinfo +++ b/net-im/py-matrix-synapse/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645551924 -SHA256 (matrix-org-synapse-v1.53.0_GH0.tar.gz) = 
e9fb5c8d1178996e677f6ce8f44c11d9294da0a06297fffa34706879870f9bf4 -SIZE (matrix-org-synapse-v1.53.0_GH0.tar.gz) = 7696244 +TIMESTAMP = 1648214024 +SHA256 (matrix-org-synapse-v1.55.2_GH0.tar.gz) = 8e06cb264d1631d2518f2f726c89d62c5ed7148b73fbb8190a9c120d3166e7c4 +SIZE (matrix-org-synapse-v1.55.2_GH0.tar.gz) = 7739760 diff --git a/net-im/py-matrix-synapse/files/patch-synapse_python__dependencies.py b/net-im/py-matrix-synapse/files/patch-synapse_python__dependencies.py index 342f69ef66b..f6d0e485233 100644 --- a/net-im/py-matrix-synapse/files/patch-synapse_python__dependencies.py +++ b/net-im/py-matrix-synapse/files/patch-synapse_python__dependencies.py @@ -1,11 +1,11 @@ ---- synapse/python_dependencies.py.orig 2022-02-22 11:32:11 UTC +--- synapse/python_dependencies.py.orig 2022-03-24 17:18:43 UTC +++ synapse/python_dependencies.py -@@ -86,7 +87,7 @@ REQUIREMENTS = [ - "typing-extensions>=3.7.4", +@@ -80,7 +80,7 @@ REQUIREMENTS = [ + "typing-extensions>=3.10.0", # We enforce that we have a `cryptography` version that bundles an `openssl` # with the latest security patches. - "cryptography>=3.4.7", + "cryptography", - "ijson>=3.1", + # ijson 3.1.4 fixes a bug with "." in property names + "ijson>=3.1.4", "matrix-common~=1.1.0", - ] diff --git a/net-im/py-matrix-synapse/files/pkg-message.in b/net-im/py-matrix-synapse/files/pkg-message.in index 9bf3ac75d6c..5280b1c46a3 100644 --- a/net-im/py-matrix-synapse/files/pkg-message.in +++ b/net-im/py-matrix-synapse/files/pkg-message.in @@ -31,6 +31,17 @@ details on this issue. 
[0] https://matrix-org.github.io/synapse/latest/turn-howto.html +EOM +} +{ type: upgrade + maximum_version: 1.55.0 + message: <= 8 +-if version.major == 0 and version.minor < 5: raise Exception, "Unsupported Version of Twisted Words" ++if version.major == 0 and version.minor < 5: raise Exception("Unsupported Version of Twisted Words") + + from twisted.words.xish import domish + from twisted.words.protocols.jabber import xmlstream +@@ -64,11 +64,11 @@ class JabberClientFactory(xmlstream.XmlStreamFactory): + + + def rawDataIn(self, buf): +- log.msg("RECV: %s" % unicode(buf, 'utf-8').encode('ascii', 'replace')) ++ log.msg("RECV: %s" % str(buf, 'utf-8').encode('ascii', 'replace')) + + + def rawDataOut(self, buf): +- log.msg("SEND: %s" % unicode(buf, 'utf-8').encode('ascii', 'replace')) ++ log.msg("SEND: %s" % str(buf, 'utf-8').encode('ascii', 'replace')) + + + class PunjabAuthenticator(xmlstream.ConnectAuthenticator): +--- punjab/session.py.orig 2013-07-24 20:35:00 UTC ++++ punjab/session.py +@@ -21,7 +21,7 @@ from punjab import jabber + from punjab.xmpp import ns + + import time +-import error ++from . 
import error + + try: + from twisted.internet import ssl +@@ -40,7 +40,7 @@ class XMPPClientConnector(SRVConnector): + """ + def __init__(self, client_reactor, domain, factory): + """ Init """ +- if isinstance(domain, unicode): ++ if isinstance(domain, str): + warnings.warn( + "Domain argument to XMPPClientConnector should be bytes, " + "not unicode", +@@ -75,7 +75,7 @@ def make_session(pint, attrs, session_type='BOSH'): + log.msg('================================== %s connect to %s:%s ==================================' % (str(time.time()),s.hostname,s.port)) + + connect_srv = s.connect_srv +- if attrs.has_key('route'): ++ if 'route' in attrs: + connect_srv = False + if s.hostname in ['localhost', '127.0.0.1']: + connect_srv = False +@@ -119,7 +119,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + """ + Initialize the session + """ +- if attrs.has_key('charset'): ++ if 'charset' in attrs: + self.charset = str(attrs['charset']) + else: + self.charset = 'utf-8' +@@ -152,7 +152,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + self.waiting_requests = [] + self.use_raw = attrs.get('raw', False) + +- self.raw_buffer = u"" ++ self.raw_buffer = "" + self.xmpp_node = '' + self.success = 0 + self.mechanisms = [] +@@ -173,20 +173,20 @@ class Session(jabber.JabberClientFactory, server.Sessi + self.hold = int(attrs.get('hold', 0)) + self.inactivity = int(attrs.get('inactivity', 900)) # default inactivity 15 mins + +- if attrs.has_key('window'): ++ if 'window' in attrs: + self.window = int(attrs['window']) + else: + self.window = self.hold + 2 + +- if attrs.has_key('polling'): ++ if 'polling' in attrs: + self.polling = int(attrs['polling']) + else: + self.polling = 0 + +- if attrs.has_key('port'): ++ if 'port' in attrs: + self.port = int(attrs['port']) + +- if attrs.has_key('hostname'): ++ if 'hostname' in attrs: + self.hostname = attrs['hostname'] + else: + self.hostname = self.to +@@ -195,10 +195,10 @@ class Session(jabber.JabberClientFactory, 
server.Sessi + + self.connect_srv = getattr(pint, 'connect_srv', True) + +- self.secure = attrs.has_key('secure') and attrs['secure'] == 'true' ++ self.secure = 'secure' in attrs and attrs['secure'] == 'true' + self.authenticator.useTls = self.secure + +- if attrs.has_key('route'): ++ if 'route' in attrs: + if attrs['route'].startswith("xmpp:"): + self.route = attrs['route'][5:] + if self.route.startswith("//"): +@@ -246,7 +246,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + log.err() + if self.use_raw and self.authid: + if type(buf) == type(''): +- buf = unicode(buf, 'utf-8') ++ buf = str(buf, 'utf-8') + # add some raw data + self.raw_buffer = self.raw_buffer + buf + +@@ -402,7 +402,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + + self.authid = self.xmlstream.sid + +- if not self.attrs.has_key('no_events'): ++ if 'no_events' not in self.attrs: + + self.xmlstream.addOnetimeObserver("/auth", self.stanzaHandler) + self.xmlstream.addOnetimeObserver("/response", self.stanzaHandler) +@@ -461,7 +461,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + if len(self.waiting_requests) > 0: + self.returnWaitingRequests() + self.elems = [] # reset elems +- self.raw_buffer = u"" # reset raw buffer, features should not be in it ++ self.raw_buffer = "" # reset raw buffer, features should not be in it + + def bindHandler(self, stz): + """bind debugger for punjab, this is temporary! 
""" +@@ -478,7 +478,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + stz.prefixes = ns.XMPP_PREFIXES + if self.use_raw and self.authid: + stz = domish.SerializedXML(self.raw_buffer) +- self.raw_buffer = u"" ++ self.raw_buffer = "" + + self.elems.append(stz) + if self.waiting_requests and len(self.waiting_requests) > 0: +@@ -534,7 +534,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + else: # need to wait for a new request and then expire + do_expire = False + +- if self.pint and self.pint.sessions.has_key(self.sid): ++ if self.pint and self.sid in self.pint.sessions: + if do_expire: + try: + self.expire() +@@ -573,7 +573,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + else: # need to wait for a new request and then expire + do_expire = False + +- if self.pint and self.pint.sessions.has_key(self.sid): ++ if self.pint and self.sid in self.pint.sessions: + if do_expire: + try: + self.expire() +@@ -607,8 +607,8 @@ class Session(jabber.JabberClientFactory, server.Sessi + else: + observers = self.xmlstream._xpathObservers + emptyLists = [] +- for priority, priorityObservers in observers.iteritems(): +- for query, callbacklist in priorityObservers.iteritems(): ++ for priority, priorityObservers in observers.items(): ++ for query, callbacklist in priorityObservers.items(): + callbacklist.callbacks = [] + emptyLists.append((priority, query)) + +@@ -663,7 +663,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + wait += self.wait # if we have pending requests we need to add the wait time + + if time.time() - self.lastModified > wait+(0.1): +- if self.site.sessions.has_key(self.uid): ++ if self.uid in self.site.sessions: + self.terminate() + else: + pass +@@ -673,9 +673,9 @@ class Session(jabber.JabberClientFactory, server.Sessi + + + def _cacheData(self, rid, data): +- if len(self.cache_data.keys())>=3: ++ if len(list(self.cache_data.keys()))>=3: + # remove the first one in +- keys = self.cache_data.keys() ++ keys = 
list(self.cache_data.keys()) + keys.sort() + del self.cache_data[keys[0]] + +@@ -686,11 +686,11 @@ class Session(jabber.JabberClientFactory, server.Sessi + + def _sessionResultEvent(self, iq): + """ """ +- if len(self.waiting_requests)>0: +- wr = self.waiting_requests.pop(0) +- d = wr.deferred +- else: +- d = None ++ if len(self.waiting_requests)>0: ++ wr = self.waiting_requests.pop(0) ++ d = wr.deferred ++ else: ++ d = None + + if iq["type"] == "result": + if d: +@@ -710,7 +710,7 @@ class Session(jabber.JabberClientFactory, server.Sessi + + self.authenticator._reset() + if self.use_raw: +- self.raw_buffer = u"" ++ self.raw_buffer = "" + + + +--- punjab/xmpp/server.py.orig 2012-07-15 00:26:12 UTC ++++ punjab/xmpp/server.py +@@ -136,7 +136,7 @@ class XMPPServerProtocol(xmlstream.XmlStream): + + + def streamConnected(self, elm): +- print "stream connected" ++ print("stream connected") + + def streamStarted(self, elm): + """stream has started, we need to respond +@@ -162,8 +162,8 @@ class XMPPServerProtocol(xmlstream.XmlStream): + def onElement(self, element): + try: + xmlstream.XmlStream.onElement(self, element) +- except Exception, e: +- print "Exception!", e ++ except Exception as e: ++ print("Exception!", e) + raise e + + def onDocumentEnd(self): diff --git a/net-im/telepathy-farstream/Makefile b/net-im/telepathy-farstream/Makefile index 3d362bc1dc2..02eee15eec3 100644 --- a/net-im/telepathy-farstream/Makefile +++ b/net-im/telepathy-farstream/Makefile @@ -2,7 +2,7 @@ PORTNAME= telepathy-farstream PORTVERSION= 0.6.2 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= net-im MASTER_SITES= http://telepathy.freedesktop.org/releases/${PORTNAME}/ diff --git a/net-im/telepathy-gabble/Makefile b/net-im/telepathy-gabble/Makefile index 70950dd1b0b..539fb7003f1 100644 --- a/net-im/telepathy-gabble/Makefile +++ b/net-im/telepathy-gabble/Makefile @@ -2,6 +2,7 @@ PORTNAME= telepathy-gabble PORTVERSION= 0.18.4 +PORTREVISION= 1 CATEGORIES= net-im MASTER_SITES= 
http://telepathy.freedesktop.org/releases/${PORTNAME}/ diff --git a/net-im/telepathy-glib/Makefile b/net-im/telepathy-glib/Makefile index 093cb84aeaa..447d4ecb7f2 100644 --- a/net-im/telepathy-glib/Makefile +++ b/net-im/telepathy-glib/Makefile @@ -3,7 +3,7 @@ PORTNAME= telepathy-glib PORTVERSION= 0.24.1 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= net-im MASTER_SITES= http://telepathy.freedesktop.org/releases/${PORTNAME}/ diff --git a/net-im/telepathy-idle/Makefile b/net-im/telepathy-idle/Makefile index 0e269f123c7..ea81386b390 100644 --- a/net-im/telepathy-idle/Makefile +++ b/net-im/telepathy-idle/Makefile @@ -3,7 +3,7 @@ PORTNAME= telepathy-idle PORTVERSION= 0.2.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= net-im MASTER_SITES= http://telepathy.freedesktop.org/releases/${PORTNAME}/ diff --git a/net-im/telepathy-logger-qt5/Makefile b/net-im/telepathy-logger-qt5/Makefile index c2a2c500800..0454d63791b 100644 --- a/net-im/telepathy-logger-qt5/Makefile +++ b/net-im/telepathy-logger-qt5/Makefile @@ -1,5 +1,6 @@ PORTNAME= telepathy-logger-qt DISTVERSION= 17.09.0 +PORTREVISION= 1 CATEGORIES= net-im MASTER_SITES= ${MASTER_SITE_KDE} MASTER_SITE_SUBDIR= stable/${PORTNAME}/${PORTVERSION:R}/src diff --git a/net-im/telepathy-logger/Makefile b/net-im/telepathy-logger/Makefile index 5386dd99da5..15c7b54bf9e 100644 --- a/net-im/telepathy-logger/Makefile +++ b/net-im/telepathy-logger/Makefile @@ -3,7 +3,7 @@ PORTNAME= telepathy-logger PORTVERSION= 0.8.2 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= net-im MASTER_SITES= http://telepathy.freedesktop.org/releases/${PORTNAME}/ diff --git a/net-im/telepathy-mission-control/Makefile b/net-im/telepathy-mission-control/Makefile index b3d6ab5765f..686df5855db 100644 --- a/net-im/telepathy-mission-control/Makefile +++ b/net-im/telepathy-mission-control/Makefile @@ -3,6 +3,7 @@ PORTNAME= telepathy-mission-control PORTVERSION= 5.16.6 +PORTREVISION= 1 CATEGORIES= net-im devel MASTER_SITES= 
http://telepathy.freedesktop.org/releases/telepathy-mission-control/ diff --git a/net-im/telepathy-qt/Makefile b/net-im/telepathy-qt/Makefile index 9b412d32f8f..3f8fdf88647 100644 --- a/net-im/telepathy-qt/Makefile +++ b/net-im/telepathy-qt/Makefile @@ -2,7 +2,7 @@ PORTNAME= telepathy-qt DISTVERSION= 0.9.8 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= net-im MASTER_SITES= https://telepathy.freedesktop.org/releases/${PORTNAME}/ PKGNAMESUFFIX= 5 diff --git a/net-im/telepathy-salut/Makefile b/net-im/telepathy-salut/Makefile index ac10e5869d9..7751f19b2f7 100644 --- a/net-im/telepathy-salut/Makefile +++ b/net-im/telepathy-salut/Makefile @@ -3,7 +3,7 @@ PORTNAME= telepathy-salut PORTVERSION= 0.8.1 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= net-im MASTER_SITES= http://telepathy.freedesktop.org/releases/${PORTNAME}/ diff --git a/net-mgmt/arpwatch/Makefile b/net-mgmt/arpwatch/Makefile index 343e1c1ebcb..a4e1e60dd2d 100644 --- a/net-mgmt/arpwatch/Makefile +++ b/net-mgmt/arpwatch/Makefile @@ -1,7 +1,7 @@ # Created by: Brian Somers PORTNAME= arpwatch -PORTVERSION= 3.2 +PORTVERSION= 3.3 CATEGORIES= net-mgmt MASTER_SITES= https://ee.lbl.gov/downloads/arpwatch/ \ LOCAL/leres/arpwatch diff --git a/net-mgmt/arpwatch/distinfo b/net-mgmt/arpwatch/distinfo index d060a59db3a..2929866cf61 100644 --- a/net-mgmt/arpwatch/distinfo +++ b/net-mgmt/arpwatch/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1639606389 -SHA256 (arpwatch-3.2.tar.gz) = 175fb3559535c2bb9ac2094ed4f89c6d18c97031dba393c2ef6dcdc521f3a340 -SIZE (arpwatch-3.2.tar.gz) = 117774 +TIMESTAMP = 1648326275 +SHA256 (arpwatch-3.3.tar.gz) = d47fa8b291fc37a25a2d0f3e1b64f451dc0be82d714a10ffa6ef8b0b9e33e166 +SIZE (arpwatch-3.3.tar.gz) = 117839 diff --git a/net-mgmt/check_ssl_cert/Makefile b/net-mgmt/check_ssl_cert/Makefile index 8aea09387a3..0e524584e29 100644 --- a/net-mgmt/check_ssl_cert/Makefile +++ b/net-mgmt/check_ssl_cert/Makefile @@ -1,6 +1,6 @@ PORTNAME= check_ssl_cert DISTVERSIONPREFIX= v -DISTVERSION= 2.22.0 +DISTVERSION= 2.23.0 
CATEGORIES= net-mgmt MAINTAINER= cmt@FreeBSD.org diff --git a/net-mgmt/check_ssl_cert/distinfo b/net-mgmt/check_ssl_cert/distinfo index c48317dc8d0..b78ba0a9ae3 100644 --- a/net-mgmt/check_ssl_cert/distinfo +++ b/net-mgmt/check_ssl_cert/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647003313 -SHA256 (matteocorti-check_ssl_cert-v2.22.0_GH0.tar.gz) = dcf2f4b733bef9be3050298a1c50241b38bc6a1e04902cc30167485c8e863dd9 -SIZE (matteocorti-check_ssl_cert-v2.22.0_GH0.tar.gz) = 296995 +TIMESTAMP = 1648235746 +SHA256 (matteocorti-check_ssl_cert-v2.23.0_GH0.tar.gz) = b270d185c78493def5b68ba86a6069ddd4c903f0c383dc8782f7ff1df52d9141 +SIZE (matteocorti-check_ssl_cert-v2.23.0_GH0.tar.gz) = 301798 diff --git a/net-mgmt/ndpmon/Makefile b/net-mgmt/ndpmon/Makefile index a6054454ff6..0e4bf1fb80f 100644 --- a/net-mgmt/ndpmon/Makefile +++ b/net-mgmt/ndpmon/Makefile @@ -2,7 +2,7 @@ PORTNAME= ndpmon PORTVERSION= 1.4.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= net-mgmt MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTNAME}-1.4/ diff --git a/net-mgmt/netbox/Makefile b/net-mgmt/netbox/Makefile index de132f637d0..03c6f934b2a 100644 --- a/net-mgmt/netbox/Makefile +++ b/net-mgmt/netbox/Makefile @@ -1,6 +1,6 @@ PORTNAME= netbox DISTVERSIONPREFIX= v -DISTVERSION= 3.1.9 +DISTVERSION= 3.1.10 CATEGORIES= net-mgmt python MAINTAINER= kai@FreeBSD.org @@ -23,7 +23,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE.txt # - www/py-dj32-djangorestframework (in conjunction with www/py-dj32-drf-yasg) # BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}markdown-include>=0.6.0<1:textproc/py-markdown-include@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}mkdocs-material>=8.2.5<9:textproc/py-mkdocs-material@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}mkdocs-material>=8.2.7<9:textproc/py-mkdocs-material@${PY_FLAVOR} RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}django32>=3.2.11<3.3:www/py-django32@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}dj32-django-cors-headers>=3.11.0<4:www/py-dj32-django-cors-headers@${PY_FLAVOR} \ 
${PYTHON_PKGNAMEPREFIX}dj32-django-debug-toolbar>=3.2.4<4:www/py-dj32-django-debug-toolbar@${PY_FLAVOR} \ @@ -41,17 +41,17 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}django32>=3.2.11<3.3:www/py-django32@${PY_FL ${PYTHON_PKGNAMEPREFIX}dj32-graphene-django>=2.15.0<3:devel/py-dj32-graphene-django@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}django-pglocks>=1.0.4<1.1:www/py-django-pglocks@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}Jinja2>=3.0.1<4:devel/py-Jinja2@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}markdown>=3.3.4<3.5:textproc/py-markdown@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}markdown>=3.3.6<3.5:textproc/py-markdown@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}netaddr>=0.8.0<1:net/py-netaddr@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pillow>=8.2.0:graphics/py-pillow@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}psycopg2>=2.9.2<3:databases/py-psycopg2@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}social-auth-app-django>=5.0.0<6:www/py-social-auth-app-django@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}social-auth-core>=4.1.0<5:security/py-social-auth-core@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}svgwrite>=1.4.1<2:graphics/py-svgwrite@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}svgwrite>=1.4.2<2:graphics/py-svgwrite@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}tablib>=3.2.0<4:textproc/py-tablib@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}tzdata>=2021.5:devel/py-tzdata@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}yaml>=5.4.1<7:devel/py-yaml@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}gunicorn>=19.9.0<21:www/py-gunicorn@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}gunicorn>=20.1.0<21:www/py-gunicorn@${PY_FLAVOR} USES= cpe pgsql:10+ python:3.7+ CPE_VENDOR= netbox_project diff --git a/net-mgmt/netbox/distinfo b/net-mgmt/netbox/distinfo index 96b9b48ef7d..b91328798f4 100644 --- a/net-mgmt/netbox/distinfo +++ b/net-mgmt/netbox/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646669081 -SHA256 (netbox-community-netbox-v3.1.9_GH0.tar.gz) = a0b5729554928cff4611902f0eb0aa7773ec0cd06e5545c9ca68df36f71b0405 -SIZE (netbox-community-netbox-v3.1.9_GH0.tar.gz) = 4405666 
+TIMESTAMP = 1648231284 +SHA256 (netbox-community-netbox-v3.1.10_GH0.tar.gz) = c88940a9ea2bf1eac5cbb3fe9c884e505bd67181a2466a1bded58270337c7eaf +SIZE (netbox-community-netbox-v3.1.10_GH0.tar.gz) = 4408051 diff --git a/net-mgmt/netbox/pkg-plist b/net-mgmt/netbox/pkg-plist index 0fc40e6857b..359820288e3 100644 --- a/net-mgmt/netbox/pkg-plist +++ b/net-mgmt/netbox/pkg-plist @@ -113,6 +113,7 @@ %%PORTDOCS%%%%DOCSDIR%%/models/ipam/aggregate.md %%PORTDOCS%%%%DOCSDIR%%/models/ipam/asn.md %%PORTDOCS%%%%DOCSDIR%%/models/ipam/fhrpgroup.md +%%PORTDOCS%%%%DOCSDIR%%/models/ipam/fhrpgroupassignment.md %%PORTDOCS%%%%DOCSDIR%%/models/ipam/ipaddress.md %%PORTDOCS%%%%DOCSDIR%%/models/ipam/iprange.md %%PORTDOCS%%%%DOCSDIR%%/models/ipam/prefix.md @@ -529,8 +530,8 @@ %%DATADIR%%/project-static/docs/administration/permissions/index.html %%DATADIR%%/project-static/docs/administration/replicating-netbox/index.html %%DATADIR%%/project-static/docs/assets/images/favicon.png -%%DATADIR%%/project-static/docs/assets/javascripts/bundle.467223ff.min.js -%%DATADIR%%/project-static/docs/assets/javascripts/bundle.467223ff.min.js.map +%%DATADIR%%/project-static/docs/assets/javascripts/bundle.e87a5f81.min.js +%%DATADIR%%/project-static/docs/assets/javascripts/bundle.e87a5f81.min.js.map %%DATADIR%%/project-static/docs/assets/javascripts/lunr/min/lunr.ar.min.js %%DATADIR%%/project-static/docs/assets/javascripts/lunr/min/lunr.da.min.js %%DATADIR%%/project-static/docs/assets/javascripts/lunr/min/lunr.de.min.js @@ -557,10 +558,10 @@ %%DATADIR%%/project-static/docs/assets/javascripts/lunr/min/lunr.zh.min.js %%DATADIR%%/project-static/docs/assets/javascripts/lunr/tinyseg.js %%DATADIR%%/project-static/docs/assets/javascripts/lunr/wordcut.js -%%DATADIR%%/project-static/docs/assets/javascripts/workers/search.bd0b6b67.min.js -%%DATADIR%%/project-static/docs/assets/javascripts/workers/search.bd0b6b67.min.js.map -%%DATADIR%%/project-static/docs/assets/stylesheets/main.2d9f7617.min.css 
-%%DATADIR%%/project-static/docs/assets/stylesheets/main.2d9f7617.min.css.map +%%DATADIR%%/project-static/docs/assets/javascripts/workers/search.5e67fbfe.min.js +%%DATADIR%%/project-static/docs/assets/javascripts/workers/search.5e67fbfe.min.js.map +%%DATADIR%%/project-static/docs/assets/stylesheets/main.9d5733d3.min.css +%%DATADIR%%/project-static/docs/assets/stylesheets/main.9d5733d3.min.css.map %%DATADIR%%/project-static/docs/assets/stylesheets/palette.e6a45f82.min.css %%DATADIR%%/project-static/docs/assets/stylesheets/palette.e6a45f82.min.css.map %%DATADIR%%/project-static/docs/configuration/dynamic-settings/index.html @@ -668,6 +669,7 @@ %%DATADIR%%/project-static/docs/models/ipam/aggregate/index.html %%DATADIR%%/project-static/docs/models/ipam/asn/index.html %%DATADIR%%/project-static/docs/models/ipam/fhrpgroup/index.html +%%DATADIR%%/project-static/docs/models/ipam/fhrpgroupassignment/index.html %%DATADIR%%/project-static/docs/models/ipam/ipaddress/index.html %%DATADIR%%/project-static/docs/models/ipam/iprange/index.html %%DATADIR%%/project-static/docs/models/ipam/prefix/index.html @@ -861,6 +863,7 @@ %%DATADIR%%/templates/dcim/inc/device_import_header.html %%DATADIR%%/templates/dcim/inc/endpoint_connection.html %%DATADIR%%/templates/dcim/inc/interface_vlans_table.html +%%DATADIR%%/templates/dcim/inc/nonracked_devices.html %%DATADIR%%/templates/dcim/inc/rack_elevation.html %%DATADIR%%/templates/dcim/interface.html %%DATADIR%%/templates/dcim/interface_create.html diff --git a/net-mgmt/torrus/Makefile b/net-mgmt/torrus/Makefile index 12d6659c388..c56ae9dc314 100644 --- a/net-mgmt/torrus/Makefile +++ b/net-mgmt/torrus/Makefile @@ -2,6 +2,7 @@ PORTNAME= torrus PORTVERSION= 2.09 +PORTREVISION= 1 CATEGORIES= net-mgmt MASTER_SITES= SF/torrus/torrus/ diff --git a/net-mgmt/virt-viewer/Makefile b/net-mgmt/virt-viewer/Makefile index d83aa4e01f3..ec2a5a57439 100644 --- a/net-mgmt/virt-viewer/Makefile +++ b/net-mgmt/virt-viewer/Makefile @@ -2,6 +2,7 @@ PORTNAME= 
virt-viewer PORTVERSION= 11.0 +PORTREVISION= 1 CATEGORIES= net-mgmt MASTER_SITES= https://virt-manager.org/download/sources/${PORTNAME}/ diff --git a/net-p2p/dclib/Makefile b/net-p2p/dclib/Makefile index 1391aa70427..028cbdaf423 100644 --- a/net-p2p/dclib/Makefile +++ b/net-p2p/dclib/Makefile @@ -2,7 +2,7 @@ PORTNAME= dclib PORTVERSION= 0.3.23 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= net-p2p MASTER_SITES= SF/wxdcgui/${PORTNAME}/${PORTVERSION} diff --git a/net-p2p/gtk-gnutella/Makefile b/net-p2p/gtk-gnutella/Makefile index a63c4a6507c..2315f2f1758 100644 --- a/net-p2p/gtk-gnutella/Makefile +++ b/net-p2p/gtk-gnutella/Makefile @@ -2,6 +2,7 @@ PORTNAME= gtk-gnutella PORTVERSION= 1.2.2 +PORTREVISION= 1 CATEGORIES= net-p2p MASTER_SITES= SF diff --git a/net-p2p/linuxdcpp/Makefile b/net-p2p/linuxdcpp/Makefile index 9724e447737..de37158edba 100644 --- a/net-p2p/linuxdcpp/Makefile +++ b/net-p2p/linuxdcpp/Makefile @@ -1,6 +1,6 @@ PORTNAME= linuxdcpp PORTVERSION= 1.1.0 -PORTREVISION= 17 +PORTREVISION= 18 CATEGORIES= net-p2p MASTER_SITES= http://launchpadlibrarian.net/69733951/ diff --git a/net-p2p/microdc2/Makefile b/net-p2p/microdc2/Makefile index a20118ec301..114cc636422 100644 --- a/net-p2p/microdc2/Makefile +++ b/net-p2p/microdc2/Makefile @@ -2,7 +2,7 @@ PORTNAME= microdc2 PORTVERSION= 0.15.6 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= net-p2p MASTER_SITES= http://corsair626.no-ip.org/microdc/ diff --git a/net-p2p/minder/Makefile b/net-p2p/minder/Makefile index d1e77a999fd..a25ff3b8a21 100644 --- a/net-p2p/minder/Makefile +++ b/net-p2p/minder/Makefile @@ -2,7 +2,7 @@ PORTNAME= minder PORTVERSION= 2.0.2 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= net-p2p MASTER_SITES= http://www.alhem.net/project/minder/ diff --git a/net-p2p/py-vertex/Makefile b/net-p2p/py-vertex/Makefile index 14d9dd9dba0..d9bca832b8b 100644 --- a/net-p2p/py-vertex/Makefile +++ b/net-p2p/py-vertex/Makefile @@ -18,8 +18,9 @@ RUN_DEPENDS= 
${PYTHON_PKGNAMEPREFIX}epsilon>=0:devel/py-epsilon@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}openssl>=0:security/py-openssl@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}twisted>=0:devel/py-twisted@${PY_FLAVOR} -NO_ARCH= yes -USE_PYTHON= autoplist concurrent distutils USES= python:3.6+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes .include diff --git a/net-p2p/py-vertex/files/patch-2to3 b/net-p2p/py-vertex/files/patch-2to3 new file mode 100644 index 00000000000..6f04ad175c5 --- /dev/null +++ b/net-p2p/py-vertex/files/patch-2to3 @@ -0,0 +1,519 @@ +--- vertex/gtk2hack.py.orig 2013-08-05 02:42:24 UTC ++++ vertex/gtk2hack.py +@@ -15,7 +15,7 @@ class _NullCb: + self.name = name + + def __call__(self, *a, **kw): +- print 'No callback provided for', self.name, a, kw ++ print('No callback provided for', self.name, a, kw) + + class _SignalAttacher: + def __init__(self, original): +@@ -100,19 +100,19 @@ class AcceptConnectionDialog: + + def acceptConnectionEvt(self, evt): + self.done = True +- print "YES" ++ print("YES") + self.d.callback(1) +- print "WHAT" ++ print("WHAT") + self.window.destroy() + + def rejectConnectionEvt(self, evt): +- print "DSTRY" ++ print("DSTRY") + if not self.done: +- print "DIE!" 
++ print("DIE!") + from twisted.python import failure + self.d.errback(failure.Failure(KeyError("Connection rejected by user"))) + else: +- print "OK" ++ print("OK") + + from twisted.internet.protocol import ServerFactory + from twisted.internet.protocol import Protocol +@@ -120,10 +120,10 @@ from twisted.internet.protocol import Protocol + class VertexDemoProtocol(Protocol): + + def connectionMade(self): +- print 'CONN MADE' ++ print('CONN MADE') + + def dataReceived(self, data): +- print 'HOLY SHNIKIES', data ++ print('HOLY SHNIKIES', data) + + class VertexFactory(ServerFactory): + protocol = VertexDemoProtocol +@@ -151,7 +151,7 @@ class BuddyItem: + self.plug.loadedBuddies[q2qaddress] = self + + def initiateFileTransfer(self, evt): +- print 'Initiate transfer with ' + self.alias + self.q2qaddress ++ print('Initiate transfer with ' + self.alias + self.q2qaddress) + + def addToMenu(self): + self.plug.section.append(self.menuItem) +@@ -170,7 +170,7 @@ class PlugEntry: + self.xml = gtk.glade.XML(GLADE_FILE, "notification_popup") + + def register(self, section): +- print 'REGISTER' ++ print('REGISTER') + self.section = section + + workingdir = FilePath(os.path.expanduser("~/.vertex")) +@@ -197,11 +197,11 @@ class PlugEntry: + self.buildContactMenu() + + def clearContactMenu(self): +- for bud in self.loadedBuddies.values(): ++ for bud in list(self.loadedBuddies.values()): + bud.removeFromMenu() + + def buildContactMenu(self): +- l = self.loadedBuddies.values() ++ l = list(self.loadedBuddies.values()) + l.sort(key=lambda x: x.alias) + l.reverse() + for bud in l: +--- vertex/ptcp.py.orig 2013-08-05 02:42:24 UTC ++++ vertex/ptcp.py +@@ -17,7 +17,7 @@ from vertex import tcpdfa + from vertex.statemachine import StateError + + +-genConnID = itertools.count(8).next ++genConnID = itertools.count(8).__next__ + + MAX_PSEUDO_PORT = (2 ** 16) + +@@ -818,7 +818,9 @@ class PTCPConnection(tcpdfa.TCP): + class PTCPAddress(object): + # garbage + +- def __init__(self, (host, port), 
(pseudoHostPort, pseudoPeerPort)): ++ def __init__(self, xxx_todo_changeme, xxx_todo_changeme1): ++ (host, port) = xxx_todo_changeme ++ (pseudoHostPort, pseudoPeerPort) = xxx_todo_changeme1 + self.host = host + self.port = port + self.pseudoHostPort = pseudoHostPort +@@ -945,7 +947,7 @@ class PTCP(protocol.DatagramProtocol): + stop notifications, sending hail-mary final FIN packets (which may not + reach the other end, but nevertheless can be useful) when possible. + """ +- for conn in self._connections.values(): ++ for conn in list(self._connections.values()): + conn.immediateShutdown() + assert not self._connections + +@@ -994,11 +996,11 @@ class PTCP(protocol.DatagramProtocol): + stb=True, + destination=addr)) + except GarbageDataError: +- print "garbage data!", pkt +- except ChecksumMismatchError, cme: +- print "bad checksum", pkt, cme +- print repr(pkt.data) +- print hex(pkt.checksum), hex(pkt.computeChecksum()) ++ print("garbage data!", pkt) ++ except ChecksumMismatchError as cme: ++ print("bad checksum", pkt, cme) ++ print(repr(pkt.data)) ++ print(hex(pkt.checksum), hex(pkt.computeChecksum())) + else: + self.packetReceived(pkt) + +--- vertex/q2q.py.orig 2015-03-05 04:12:41 UTC ++++ vertex/q2q.py +@@ -373,7 +373,7 @@ class TCPMethod: + def attempt(self, *a): + return [self.attemptFactory(self, *a)] + +-connectionCounter = itertools.count().next ++connectionCounter = itertools.count().__next__ + connectionCounter() + + class VirtualConnectionAttempt(AbstractConnectionAttempt): +@@ -441,7 +441,7 @@ class _PTCPConnectionAttemptPress(AbstractConnectionAt + if not self.cancelled: + self.q2qproto.service.dispatcher.unbindPort(self.newPort) + else: +- print 'totally wacky, [press] cancelled twice!' 
++ print('totally wacky, [press] cancelled twice!') + AbstractConnectionAttempt.cancel(self) + + class PTCPMethod(TCPMethod): +@@ -498,7 +498,7 @@ class RPTCPConnectionAttempt(AbstractConnectionAttempt + if not self.cancelled: + self.q2qproto.service.dispatcher.unbindPort(self.newPort) + else: +- print 'totally wacky, [rptcp] cancelled twice!' ++ print('totally wacky, [rptcp] cancelled twice!') + AbstractConnectionAttempt.cancel(self) + + +@@ -866,7 +866,7 @@ class Q2Q(AMP, subproducer.SuperProducer): + log.msg("removing remote listener for %r" % (key,)) + self.service.listeningClients[key].remove(value) + self.listeningClient = [] +- for xport in self.connections.values(): ++ for xport in list(self.connections.values()): + safely(xport.connectionLost, reason) + for observer in self.connectionObservers: + safely(observer) +@@ -1349,7 +1349,7 @@ class Q2Q(AMP, subproducer.SuperProducer): + + subj = certificate_request.getSubject() + +- sk = subj.keys() ++ sk = list(subj.keys()) + if 'commonName' not in sk: + raise BadCertificateRequest( + "Certificate requested with bad subject: %s" % (sk,)) +@@ -1926,7 +1926,7 @@ class DefaultQ2QAvatar: + + def signCertificateRequest(self, certificateRequest, + domainCert, suggestedSerial): +- keyz = certificateRequest.getSubject().keys() ++ keyz = list(certificateRequest.getSubject().keys()) + if keyz != ['commonName']: + raise BadCertificateRequest( + "Don't know how to verify fields other than CN: " + +@@ -2034,7 +2034,7 @@ class _pemmap(object): + def file(self, name, mode): + try: + return file(os.path.join(self.pathname, name)+'.pem', mode) +- except IOError, ioe: ++ except IOError as ioe: + raise KeyError(name, ioe) + + def __setitem__(self, key, cert): +@@ -2054,21 +2054,21 @@ class _pemmap(object): + yield key, value + + def items(self): +- return list(self.iteritems()) ++ return list(self.items()) + + def iterkeys(self): +- for k, v in self.iteritems(): ++ for k, v in self.items(): + yield k + + def keys(self): +- return 
list(self.iterkeys()) ++ return list(self.keys()) + + def itervalues(self): +- for k, v in self.iteritems(): ++ for k, v in self.items(): + yield v + + def values(self): +- return list(self.itervalues()) ++ return list(self.values()) + + + +@@ -2149,7 +2149,8 @@ class PTCPConnectionDispatcher(object): + self.factory = factory + self._ports = {} + +- def seedNAT(self, (host, port), sourcePort=0, conditional=True): ++ def seedNAT(self, xxx_todo_changeme, sourcePort=0, conditional=True): ++ (host, port) = xxx_todo_changeme + if sourcePort not in self._ports: + if sourcePort != 0: + if conditional: +@@ -2183,8 +2184,8 @@ class PTCPConnectionDispatcher(object): + return proto.connect(factory, host, port) + + def iterconnections(self): +- for (p, proto) in self._ports.itervalues(): +- for c in p.protocol._connections.itervalues(): ++ for (p, proto) in self._ports.values(): ++ for c in p.protocol._connections.values(): + if c.protocol is not None: + yield c.protocol + else: +@@ -2193,8 +2194,8 @@ class PTCPConnectionDispatcher(object): + + def killAllConnections(self): + dl = [] +- for p, proto in self._ports.itervalues(): +- for c in p.protocol._connections.itervalues(): ++ for p, proto in self._ports.values(): ++ for c in p.protocol._connections.values(): + c._stopRetransmitting() + dl.append(defer.maybeDeferred(p.stopListening)) + self._ports = {} +@@ -2227,8 +2228,8 @@ class Q2QService(service.MultiService, protocol.Server + not. For testing purposes only. 
+ """ + return itertools.chain( +- self.appConnectionCache.cachedConnections.itervalues(), +- self.secureConnectionCache.cachedConnections.itervalues(), ++ iter(self.appConnectionCache.cachedConnections.values()), ++ iter(self.secureConnectionCache.cachedConnections.values()), + iter(self.subConnections), + (self.dispatcher or ()) and self.dispatcher.iterconnections()) + +@@ -2328,7 +2329,7 @@ class Q2QService(service.MultiService, protocol.Server + def _secured(proto): + lfm = self.localFactoriesMapping + def startup(listenResult): +- for protocol, factory in protocolsToFactories.iteritems(): ++ for protocol, factory in protocolsToFactories.items(): + key = (fromAddress, protocol) + if key not in lfm: + lfm[key] = [] +@@ -2336,7 +2337,7 @@ class Q2QService(service.MultiService, protocol.Server + factory.doStart() + + def shutdown(): +- for protocol, factory in protocolsToFactories.iteritems(): ++ for protocol, factory in protocolsToFactories.items(): + lfm[fromAddress, protocol].remove( + (factory, serverDescription)) + factory.doStop() +@@ -2353,7 +2354,7 @@ class Q2QService(service.MultiService, protocol.Server + + def _gotPubUDPPort(publicAddress): + self._publicUDPAddress = publicAddress +- return proto.listen(fromAddress, protocolsToFactories.keys(), ++ return proto.listen(fromAddress, list(protocolsToFactories.keys()), + serverDescription).addCallback(startup) + pubUDPDeferred.addCallback(_gotPubUDPPort) + return pubUDPDeferred +@@ -2501,7 +2502,7 @@ class Q2QService(service.MultiService, protocol.Server + + def stopService(self): + dl = [] +- for cwait, delayed in self.inboundConnections.itervalues(): ++ for cwait, delayed in self.inboundConnections.values(): + delayed.cancel() + self.inboundConnections.clear() + if self.q2qPort is not None: +--- vertex/q2qclient.py.orig 2015-03-05 02:53:34 UTC ++++ vertex/q2qclient.py +@@ -24,7 +24,7 @@ class Q2QAuthorize(Options): + self.password = password + + def reportNoCertificate(self, error): +- print "No 
certificate retrieved:", error.getErrorMessage(), "(see ~/.q2q-client-log for details)" ++ print("No certificate retrieved:", error.getErrorMessage(), "(see ~/.q2q-client-log for details)") + log.err(error) + return None + +@@ -115,7 +115,7 @@ class FileSender(protocol.Protocol): + self.transport.loseConnection() + + def dataReceived(self, data): +- print "WTF THE CLIENT IS GETTING DATA", repr(data) ++ print("WTF THE CLIENT IS GETTING DATA", repr(data)) + + def registerProducer(self, producer, streaming): + self.transport.registerProducer(producer, streaming) +@@ -166,9 +166,9 @@ class ClientQ2QService(q2q.Q2QService): + *a, **kw) + + def getDefaultFrom(self, default=None): +- i = self.certificateStorage.localStore.iterkeys() ++ i = iter(self.certificateStorage.localStore.keys()) + try: +- return i.next() ++ return next(i) + except StopIteration: + return default + +@@ -244,7 +244,7 @@ class Q2QReceive(Options): + def pr(x): + return x + def stopit(err): +- print "Couldn't Register for File Transfer:", err.getErrorMessage() ++ print("Couldn't Register for File Transfer:", err.getErrorMessage()) + log.err(err) + reactor.stop() + serv.listenQ2Q(self.parent.getFrom(), +@@ -276,13 +276,13 @@ class TextNexusUI(sigma.BaseNexusUI): + self.call.start(5) + + def report(self): +- print 'Transloads:', len(self.transloads) ++ print('Transloads:', len(self.transloads)) + for transloadui in self.transloads: +- print '---', transloadui.name, '---' +- print transloadui.bits.percent() +- for peer, mask in transloadui.masks.items(): +- print peer, mask.percent() +- print 'end report' ++ print('---', transloadui.name, '---') ++ print(transloadui.bits.percent()) ++ for peer, mask in list(transloadui.masks.items()): ++ print(peer, mask.percent()) ++ print('end report') + + class Q2QSigma(Options): + +@@ -346,7 +346,7 @@ class Q2QRegister(Options): + svc = self.parent.getService() + + def showit(x): +- print "%s: %s" % (x.value.__class__, x.getErrorMessage()) ++ print("%s: %s" % 
(x.value.__class__, x.getErrorMessage())) + + enregister(svc, newAddress, self.password).addErrback( + showit).addBoth(lambda nothing: reactor.stop()) +--- vertex/q2qstandalone.py.orig 2015-03-05 02:53:34 UTC ++++ vertex/q2qstandalone.py +@@ -41,7 +41,8 @@ class _usermap: + def __init__(self, path): + self.path = path + +- def __setitem__(self, (domain, username), password): ++ def __setitem__(self, xxx_todo_changeme, password): ++ (domain, username) = xxx_todo_changeme + domainpath = os.path.join(self.path, domain) + if not os.path.exists(domainpath): + os.makedirs(domainpath) +@@ -53,7 +54,8 @@ class _usermap: + password=password.encode('hex')).serialize()) + f.close() + +- def get(self, (domain, username)): ++ def get(self, xxx_todo_changeme1): ++ (domain, username) = xxx_todo_changeme1 + domainpath = os.path.join(self.path, domain) + if os.path.exists(domainpath): + filepath = os.path.join(domainpath, username+".info") +@@ -70,7 +72,7 @@ class DirectoryCertificateAndUserStore(q2q.DirectoryCe + try: + return q2q.DirectoryCertificateStore.getPrivateCertificate(self, domain) + except KeyError: +- if len(self.localStore.keys()) > 10: ++ if len(list(self.localStore.keys())) > 10: + # avoid DoS; nobody is going to need autocreated certs for more + # than 10 domains + raise +--- vertex/test/helpers.py.orig 2015-03-05 02:53:34 UTC ++++ vertex/test/helpers.py +@@ -63,17 +63,17 @@ class FakeQ2QService: + if pump.flush(debug): + result = True + if debug: +- print 'iteration finished. continuing?', result ++ print('iteration finished. 
continuing?', result) + c = self.calls + self.calls = [] + for s, f, a, k in c: + if debug: +- print 'timed event', s, f, a, k ++ print('timed event', s, f, a, k) + f(*a,**k) + return result + + def listenQ2Q(self, fromAddress, protocolsToFactories, serverDescription): +- for pname, pfact in protocolsToFactories.items(): ++ for pname, pfact in list(protocolsToFactories.items()): + self.listeners[fromAddress, pname] = pfact, serverDescription + return defer.succeed(None) + +@@ -90,7 +90,7 @@ class FakeQ2QService: + + listener, description = self.listeners.get((toAddress, protocolName)) + if listener is None: +- print 'void listener', fromAddress, toAddress, self.listeners, self.listener ++ print('void listener', fromAddress, toAddress, self.listeners, self.listener) + reason = Failure(KeyError()) + protocolFactory.clientConnectionFailed(None, reason) + return defer.fail(reason) +--- vertex/test/test_dependencyservice.py.orig 2013-08-05 02:42:24 UTC ++++ vertex/test/test_dependencyservice.py +@@ -35,10 +35,10 @@ class TestDependencyService(unittest.TestCase): + args = dict(one={}, two={}, three={}) + + one = One(**args) +- self.assert_(one.initialized == ['ONE', 'THREE', 'TWO']) ++ self.assertTrue(one.initialized == ['ONE', 'THREE', 'TWO']) + + two = Two(**args) +- self.assert_(two.initialized == ['ONE', 'TWO', 'THREE']) ++ self.assertTrue(two.initialized == ['ONE', 'TWO', 'THREE']) + + + def test_circularDepends(self): +@@ -52,7 +52,7 @@ class TestDependencyService(unittest.TestCase): + except depserv.StartupError: + pass + else: +- raise unittest.FailTest, 'circular dependencies did not raise an error' ++ raise unittest.FailTest('circular dependencies did not raise an error') + + + def test_requiredWithDependency(self): +@@ -66,4 +66,4 @@ class TestDependencyService(unittest.TestCase): + except depserv.StartupError: + pass + else: +- raise unittest.FailTest, 'unsatisfied dependencies did not raise an error' ++ raise unittest.FailTest('unsatisfied dependencies did 
not raise an error') +--- vertex/test/test_ptcp.py.orig 2015-03-05 04:12:41 UTC ++++ vertex/test/test_ptcp.py +@@ -48,8 +48,8 @@ class TestProtocol(protocol.Protocol): + bytes = ''.join(self.buffer) + if not self._waiting[1].startswith(bytes): + x = len(os.path.commonprefix([bytes, self._waiting[1]])) +- print x +- print 'it goes wrong starting with', repr(bytes[x:x+100]), repr(self._waiting[1][x:x+100]) ++ print(x) ++ print('it goes wrong starting with', repr(bytes[x:x+100]), repr(self._waiting[1][x:x+100])) + if bytes == self._waiting[1]: + self._waiting[0].callback(None) + self._waiting = None +@@ -167,11 +167,11 @@ class PTCPTransportTestCase(ConnectedPTCPMixin, unitte + + def gotAddress(results): + (serverSuccess, serverAddress), (clientSuccess, clientAddress) = results +- self.failUnless(serverSuccess) +- self.failUnless(clientSuccess) ++ self.assertTrue(serverSuccess) ++ self.assertTrue(clientSuccess) + +- self.assertEquals(serverAddress[1], serverPort.getHost().port) +- self.assertEquals(clientAddress[1], clientPort.getHost().port) ++ self.assertEqual(serverAddress[1], serverPort.getHost().port) ++ self.assertEqual(clientAddress[1], clientPort.getHost().port) + + def connectionsMade(ignored): + return defer.DeferredList([serverProto.transport.whoami(), clientProto.transport.whoami()]).addCallback(gotAddress) +@@ -228,7 +228,7 @@ class PTCPTransportTestCase(ConnectedPTCPMixin, unitte + ServerProtocol=TestProducerProtocol) + + def disconnected(ignored): +- self.assertEquals( ++ self.assertEqual( + ''.join(clientProto.buffer), + ''.join([chr(n) * serverProto.WRITE_SIZE + for n in range(serverProto.NUM_WRITES)])) +@@ -249,7 +249,7 @@ class PTCPTransportTestCase(ConnectedPTCPMixin, unitte + clientProto.transport.resumeProducing() + + def cbBytes(ignored): +- self.failUnless(resumed) ++ self.assertTrue(resumed) + clientProto.transport.loseConnection() + + def cbConnect(ignored): +@@ -282,7 +282,7 @@ class PTCPTransportTestCase(ConnectedPTCPMixin, unitte + + def 
cbBytes(ignored): + # print 'Disconnected' +- self.assertEquals( ++ self.assertEqual( + ''.join(clientProto.buffer), + ''.join([chr(n) * serverProto.WRITE_SIZE + for n in range(serverProto.NUM_WRITES)])) diff --git a/net-p2p/qbittorrent/Makefile b/net-p2p/qbittorrent/Makefile index 9b8ba1122da..bcfe8256233 100644 --- a/net-p2p/qbittorrent/Makefile +++ b/net-p2p/qbittorrent/Makefile @@ -1,7 +1,7 @@ # Created by: Doug Barton PORTNAME= qbittorrent -DISTVERSION= 4.4.1 +DISTVERSION= 4.4.2 CATEGORIES= net-p2p MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTNAME}-${PORTVERSION} diff --git a/net-p2p/qbittorrent/distinfo b/net-p2p/qbittorrent/distinfo index bae4af10abf..68dbe7d2361 100644 --- a/net-p2p/qbittorrent/distinfo +++ b/net-p2p/qbittorrent/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645860709 -SHA256 (qbittorrent-4.4.1.tar.xz) = 1386f000ce1d791469c3ea03e3951ca25f67f534e66896592bd12357dda9a8ec -SIZE (qbittorrent-4.4.1.tar.xz) = 4814572 +TIMESTAMP = 1648349711 +SHA256 (qbittorrent-4.4.2.tar.xz) = efa580924e96605bae916b9a8ae1f3fce82a5130647ae41d74d689761262463d +SIZE (qbittorrent-4.4.2.tar.xz) = 4840496 diff --git a/net-p2p/retroshare/Makefile b/net-p2p/retroshare/Makefile index 20797b836c3..08f7b42d59c 100644 --- a/net-p2p/retroshare/Makefile +++ b/net-p2p/retroshare/Makefile @@ -3,7 +3,7 @@ PORTNAME= retroshare DISTVERSIONPREFIX= v DISTVERSION= 0.6.4 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= net-p2p PATCH_SITES= https://github.com/${GH_ACCOUNT}/${GH_PROJECT}/commit/ diff --git a/net/Sockets/Makefile b/net/Sockets/Makefile index fa3086fa4a9..7571e5c0945 100644 --- a/net/Sockets/Makefile +++ b/net/Sockets/Makefile @@ -2,7 +2,7 @@ PORTNAME= Sockets PORTVERSION= 2.3.9.9 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= net MASTER_SITES= http://www.alhem.net/Sockets/ \ https://www.netfence.it/Sockets/ diff --git a/net/asterisk18/Makefile b/net/asterisk18/Makefile index 4a6418bd958..450ccf5efda 100644 --- a/net/asterisk18/Makefile +++ b/net/asterisk18/Makefile @@ -1,5 +1,6 @@ 
PORTNAME= asterisk PORTVERSION= 18.11.0 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= https://downloads.asterisk.org/pub/telephony/%SUBDIR%/:DEFAULT,g729 MASTER_SITE_SUBDIR= asterisk/ \ diff --git a/net/ceph14/Makefile b/net/ceph14/Makefile index 4b49cab54e0..0e5608d69ff 100644 --- a/net/ceph14/Makefile +++ b/net/ceph14/Makefile @@ -3,7 +3,7 @@ PORTNAME= ceph DISTVERSIONPREFIX= v DISTVERSION= 14.2.22 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= net PKGNAMESUFFIX= 14 diff --git a/net/fort/Makefile b/net/fort/Makefile index ee850aea883..210a0ff3437 100644 --- a/net/fort/Makefile +++ b/net/fort/Makefile @@ -1,5 +1,6 @@ PORTNAME= fort DISTVERSION= 1.5.3 +PORTREVISION= 1 CATEGORIES= net MAINTAINER= toni@devboks.com diff --git a/net/gfbgraph/Makefile b/net/gfbgraph/Makefile index 1038a2c38b6..d79ef81cb11 100644 --- a/net/gfbgraph/Makefile +++ b/net/gfbgraph/Makefile @@ -2,6 +2,7 @@ PORTNAME= gfbgraph DISTVERSION= 0.2.4 +PORTREVISION= 1 CATEGORIES= net www MASTER_SITES= GNOME diff --git a/net/gitlab-agent/Makefile b/net/gitlab-agent/Makefile index 68247b8f034..8686376befe 100644 --- a/net/gitlab-agent/Makefile +++ b/net/gitlab-agent/Makefile @@ -1,7 +1,7 @@ # Created by: Matthias Fechner PORTNAME= gitlab-agent -PORTVERSION= 14.8.1 +PORTVERSION= 14.9.0 PORTREVISION= 0 DISTVERSIONPREFIX= v CATEGORIES= net diff --git a/net/gitlab-agent/distinfo b/net/gitlab-agent/distinfo index 4e725a619f0..2ac47f1ceb6 100644 --- a/net/gitlab-agent/distinfo +++ b/net/gitlab-agent/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1646774306 -SHA256 (go/net_gitlab-agent/gitlab-agent-v14.8.1/v14.8.1.mod) = ad382b1de5d7f2197f934ac814c13c85bd7c774020fb03a362076605294da3cd -SIZE (go/net_gitlab-agent/gitlab-agent-v14.8.1/v14.8.1.mod) = 11730 -SHA256 (go/net_gitlab-agent/gitlab-agent-v14.8.1/v14.8.1.zip) = b60aa5fe0e071c37a4a1cbe7988755bbcf72b914bb6a68235463b92ea6ef7a25 -SIZE (go/net_gitlab-agent/gitlab-agent-v14.8.1/v14.8.1.zip) = 849106 +TIMESTAMP = 1647949976 +SHA256 
(go/net_gitlab-agent/gitlab-agent-v14.9.0/v14.9.0.mod) = 2f43ddd7e1d5a84ce8bfa6acb0d238811bd4492cb69be834ba01e48f275b6f58 +SIZE (go/net_gitlab-agent/gitlab-agent-v14.9.0/v14.9.0.mod) = 11730 +SHA256 (go/net_gitlab-agent/gitlab-agent-v14.9.0/v14.9.0.zip) = fb893d0809d0deaa492253e2ffc819960f70e7ef4a0c730f6097c679c67af7c6 +SIZE (go/net_gitlab-agent/gitlab-agent-v14.9.0/v14.9.0.zip) = 862506 diff --git a/net/glusterfs/Makefile b/net/glusterfs/Makefile index cf5346b73a2..1f407e239ca 100644 --- a/net/glusterfs/Makefile +++ b/net/glusterfs/Makefile @@ -1,5 +1,6 @@ PORTNAME= glusterfs PORTVERSION= 8.4 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= http://bits.gluster.org/pub/gluster/glusterfs/src/ diff --git a/net/gmid/Makefile b/net/gmid/Makefile index 0f5e9c29341..b4086ffd118 100644 --- a/net/gmid/Makefile +++ b/net/gmid/Makefile @@ -1,5 +1,5 @@ PORTNAME= gmid -DISTVERSION= 1.8.1 +DISTVERSION= 1.8.3 CATEGORIES= net MASTER_SITES= https://github.com/omar-polo/gmid/releases/download/${DISTVERSION}/ diff --git a/net/gmid/distinfo b/net/gmid/distinfo index bb64aab5294..0e9c9f07c92 100644 --- a/net/gmid/distinfo +++ b/net/gmid/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1644535162 -SHA256 (gmid-1.8.1.tar.gz) = 7605d3f692dc509540a85c4f72b0f5dbe1fc9099cc57a6345e0821cf6b028854 -SIZE (gmid-1.8.1.tar.gz) = 140350 +TIMESTAMP = 1648387071 +SHA256 (gmid-1.8.3.tar.gz) = f7b6cc1deaea6c5242301fa7440c0f5caaf282fea780ba1edcef480f7fa67fe1 +SIZE (gmid-1.8.3.tar.gz) = 141650 diff --git a/net/gnome-online-accounts/Makefile b/net/gnome-online-accounts/Makefile index fd6f54c5188..697cc2d0d82 100644 --- a/net/gnome-online-accounts/Makefile +++ b/net/gnome-online-accounts/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-online-accounts PORTVERSION= 3.40.1 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= GNOME diff --git a/net/gnome-online-miners/Makefile b/net/gnome-online-miners/Makefile index 397d3615be8..2c5c8a24905 100644 --- a/net/gnome-online-miners/Makefile +++ b/net/gnome-online-miners/Makefile @@ -2,6 
+2,7 @@ PORTNAME= gnome-online-miners PORTVERSION= 3.34.0 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= GNOME diff --git a/net/gq/Makefile b/net/gq/Makefile index 36ebd2d0cdb..0322d0661c3 100644 --- a/net/gq/Makefile +++ b/net/gq/Makefile @@ -2,7 +2,7 @@ PORTNAME= gq PORTVERSION= 1.3.4 -PORTREVISION= 15 +PORTREVISION= 16 PORTEPOCH= 1 CATEGORIES= net MASTER_SITES= SF/${PORTNAME}client/GQ%20Unstable/${PORTVERSION} diff --git a/net/grilo-plugins/Makefile b/net/grilo-plugins/Makefile index 3a60266fd62..e9347c52b29 100644 --- a/net/grilo-plugins/Makefile +++ b/net/grilo-plugins/Makefile @@ -2,6 +2,7 @@ PORTNAME= grilo-plugins PORTVERSION= 0.3.14 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= GNOME diff --git a/net/grilo/Makefile b/net/grilo/Makefile index 92f6af51d77..a7c31553b26 100644 --- a/net/grilo/Makefile +++ b/net/grilo/Makefile @@ -2,6 +2,7 @@ PORTNAME= grilo PORTVERSION= 0.3.14 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= GNOME diff --git a/net/gstreamer1-plugins-libmms/Makefile b/net/gstreamer1-plugins-libmms/Makefile index cac956dd591..f834c4935ff 100644 --- a/net/gstreamer1-plugins-libmms/Makefile +++ b/net/gstreamer1-plugins-libmms/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= net COMMENT= GStreamer mms:// and mmsh:// plugin diff --git a/net/gstreamer1-plugins-srtp/Makefile b/net/gstreamer1-plugins-srtp/Makefile index 8f8a68eccff..11ba01c4f30 100644 --- a/net/gstreamer1-plugins-srtp/Makefile +++ b/net/gstreamer1-plugins-srtp/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= net COMMENT= GStreamer srtp plugin diff --git a/net/gupnp-av/Makefile b/net/gupnp-av/Makefile index b80afa83512..74c7bf81447 100644 --- a/net/gupnp-av/Makefile +++ b/net/gupnp-av/Makefile @@ -2,6 +2,7 @@ PORTNAME= gupnp-av PORTVERSION= 0.14.0 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= GNOME diff --git a/net/gupnp-dlna/Makefile b/net/gupnp-dlna/Makefile index 2bd2622e73f..87cde492a7a 100644 --- a/net/gupnp-dlna/Makefile +++ 
b/net/gupnp-dlna/Makefile @@ -2,7 +2,7 @@ PORTNAME= gupnp-dlna PORTVERSION= 0.10.3 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= net MASTER_SITES= GNOME diff --git a/net/gupnp-tools/Makefile b/net/gupnp-tools/Makefile index 8792741d380..d88a67c1d1d 100644 --- a/net/gupnp-tools/Makefile +++ b/net/gupnp-tools/Makefile @@ -2,7 +2,7 @@ PORTNAME= gupnp-tools PORTVERSION= 0.10.2 -PORTREVISION= 1 +PORTREVISION= 3 CATEGORIES= net MASTER_SITES= GNOME @@ -21,7 +21,7 @@ LIB_DEPENDS= libgssdp-1.2.so:net/gssdp \ libgupnp-1.2.so:net/gupnp \ libsoup-2.4.so:devel/libsoup -USES= gettext gnome meson pkgconfig tar:xz +USES= gettext-tools gnome meson pkgconfig tar:xz USE_GNOME= glib20 gtk30 libxml2 USE_LDCONFIG= yes diff --git a/net/gupnp/Makefile b/net/gupnp/Makefile index d9eb80b190b..3e5a1a859cc 100644 --- a/net/gupnp/Makefile +++ b/net/gupnp/Makefile @@ -2,6 +2,7 @@ PORTNAME= gupnp PORTVERSION= 1.4.3 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= GNOME diff --git a/net/kamailio/Makefile b/net/kamailio/Makefile index 113bb0daa9a..72fefd010ed 100644 --- a/net/kamailio/Makefile +++ b/net/kamailio/Makefile @@ -2,6 +2,7 @@ PORTNAME= kamailio PORTVERSION= 5.5.4 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= http://www.kamailio.org/pub/kamailio/${PORTVERSION}/src/ DISTNAME= ${PORTNAME}-${PORTVERSION}_src diff --git a/net/kitinerary/Makefile b/net/kitinerary/Makefile index 031e5287efb..d7930ecacd8 100644 --- a/net/kitinerary/Makefile +++ b/net/kitinerary/Makefile @@ -1,5 +1,6 @@ PORTNAME= kitinerary DISTVERSION= ${KDE_APPLICATIONS_VERSION} +PORTREVISION= 1 CATEGORIES= net kde kde-applications MAINTAINER= kde@FreeBSD.org diff --git a/net/libcmis/Makefile b/net/libcmis/Makefile index 5f2104ee905..3d020ebdc21 100644 --- a/net/libcmis/Makefile +++ b/net/libcmis/Makefile @@ -1,6 +1,6 @@ PORTNAME= libcmis PORTVERSION= 0.5.2 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= net devel MASTER_SITES= https://github.com/tdf/libcmis/releases/download/v${PORTVERSION}/ \ 
https://dev-www.libreoffice.org/src/ diff --git a/net/libgnetwork/Makefile b/net/libgnetwork/Makefile index 132b4e50fb4..32775f11806 100644 --- a/net/libgnetwork/Makefile +++ b/net/libgnetwork/Makefile @@ -2,7 +2,7 @@ PORTNAME= libgnetwork PORTVERSION= 0.0.9 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= net gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/net/libgrss/Makefile b/net/libgrss/Makefile index a6a2d18fcf9..de8ef6a8f81 100644 --- a/net/libgrss/Makefile +++ b/net/libgrss/Makefile @@ -2,6 +2,7 @@ PORTNAME= libgrss PORTVERSION= 0.7.0 +PORTREVISION= 1 CATEGORIES= net gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/net/libgweather/Makefile b/net/libgweather/Makefile index 86bf2ef424e..8e6951314d4 100644 --- a/net/libgweather/Makefile +++ b/net/libgweather/Makefile @@ -2,7 +2,7 @@ PORTNAME= libgweather PORTVERSION= 40.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= net gnome MASTER_SITES= GNOME MASTER_SITE_SUBDIR= sources/${PORTNAME}/${PORTVERSION:R} diff --git a/net/libgweather4/Makefile b/net/libgweather4/Makefile index fddaba4e365..04110a783fe 100644 --- a/net/libgweather4/Makefile +++ b/net/libgweather4/Makefile @@ -1,5 +1,6 @@ PORTNAME= libgweather4 PORTVERSION= 4.0.0 +PORTREVISION= 1 CATEGORIES= net gnome MASTER_SITES= GNOME MASTER_SITE_SUBDIR= sources/libgweather/${PORTVERSION:R} diff --git a/net/liblinphone/Makefile b/net/liblinphone/Makefile index 8fb352c0795..d44fa7d5883 100644 --- a/net/liblinphone/Makefile +++ b/net/liblinphone/Makefile @@ -1,6 +1,6 @@ PORTNAME= linphone PORTVERSION= 3.12.0 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= net MASTER_SITES= https://www.linphone.org/releases/sources/linphone/ PKGNAMEPREFIX= lib diff --git a/net/libmateweather/Makefile b/net/libmateweather/Makefile index fd9a9aeb7b2..166be73d7c1 100644 --- a/net/libmateweather/Makefile +++ b/net/libmateweather/Makefile @@ -2,6 +2,7 @@ PORTNAME= libmateweather PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= net mate MASTER_SITES= MATE DIST_SUBDIR= 
mate diff --git a/net/liferea/Makefile b/net/liferea/Makefile index 2778b23fae9..aaaa81bbf0a 100644 --- a/net/liferea/Makefile +++ b/net/liferea/Makefile @@ -2,6 +2,7 @@ PORTNAME= liferea DISTVERSION= 1.12.7 +PORTREVISION= 1 CATEGORIES= net gnome MASTER_SITES= https://github.com/lwindolf/liferea/releases/download/v${PORTVERSION:S/.r/-rc/}/ diff --git a/net/mobile-broadband-provider-info/Makefile b/net/mobile-broadband-provider-info/Makefile index f8fc870929b..a1f59f3639c 100644 --- a/net/mobile-broadband-provider-info/Makefile +++ b/net/mobile-broadband-provider-info/Makefile @@ -1,5 +1,6 @@ PORTNAME= mobile-broadband-provider-info PORTVERSION= 20210805 +PORTREVISION= 1 CATEGORIES= net gnome MASTER_SITES= GNOME diff --git a/net/ns3/Makefile b/net/ns3/Makefile index e0f6590fcbb..4041a6b9d85 100644 --- a/net/ns3/Makefile +++ b/net/ns3/Makefile @@ -1,5 +1,6 @@ PORTNAME= ns3 PORTVERSION= 3.35 +PORTREVISION= 1 CATEGORIES= net education MASTER_SITES= https://www.nsnam.org/release/ DISTNAME= ns-allinone-${PORTVERSION} diff --git a/net/opensips31/Makefile b/net/opensips31/Makefile index 12bea7b0743..2538b654c7f 100644 --- a/net/opensips31/Makefile +++ b/net/opensips31/Makefile @@ -1,6 +1,6 @@ PORTNAME= opensips31 DISTVERSION= 3.1.6 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= net MASTER_SITES= https://opensips.org/pub/opensips/${DISTVERSION}/ DISTNAME= opensips-${DISTVERSION} diff --git a/net/p5-Net-OpenSSH/Makefile b/net/p5-Net-OpenSSH/Makefile index b421d3ad16f..1f5d53fe68d 100644 --- a/net/p5-Net-OpenSSH/Makefile +++ b/net/p5-Net-OpenSSH/Makefile @@ -1,5 +1,5 @@ PORTNAME= Net-OpenSSH -PORTVERSION= 0.80 +PORTVERSION= 0.82 CATEGORIES= net perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/net/p5-Net-OpenSSH/distinfo b/net/p5-Net-OpenSSH/distinfo index 5c80c9dc445..f5b324ced8f 100644 --- a/net/p5-Net-OpenSSH/distinfo +++ b/net/p5-Net-OpenSSH/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1601485411 -SHA256 (Net-OpenSSH-0.80.tar.gz) = 
fee093657df2b361472a2982859588a6e4bc5e1aee8118ece5eebfeafa8daeb3 -SIZE (Net-OpenSSH-0.80.tar.gz) = 76893 +TIMESTAMP = 1647264486 +SHA256 (Net-OpenSSH-0.82.tar.gz) = d41aa24dd53466753209f5a67c6392e6f3fa599709169342cbcc5f4871d97e83 +SIZE (Net-OpenSSH-0.82.tar.gz) = 76431 diff --git a/net/pacemaker1/Makefile b/net/pacemaker1/Makefile index 33515935dd6..f809303fc41 100644 --- a/net/pacemaker1/Makefile +++ b/net/pacemaker1/Makefile @@ -1,7 +1,7 @@ # Created by: David Shane Holden PORTVERSION= 1.1.24 -PORTREVISION= 2 +PORTREVISION= 3 PKGNAMESUFFIX= 1 .include "${.CURDIR}/Makefile.common" diff --git a/net/pacemaker2/Makefile b/net/pacemaker2/Makefile index 59811254cfd..79e6cebd46a 100644 --- a/net/pacemaker2/Makefile +++ b/net/pacemaker2/Makefile @@ -1,6 +1,7 @@ # Created by: David Shane Holden PORTVERSION= 2.1.2 +PORTREVISION= 1 PKGNAMESUFFIX= 2 .include "${.CURDIR}/../pacemaker1/Makefile.common" diff --git a/net/pecl-xmlrpc/Makefile b/net/pecl-xmlrpc/Makefile index 51c280c334f..c212a688627 100644 --- a/net/pecl-xmlrpc/Makefile +++ b/net/pecl-xmlrpc/Makefile @@ -2,6 +2,7 @@ PORTNAME= xmlrpc DISTVERSION= 1.0.0RC3 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= net pear diff --git a/net/php74-xmlrpc/Makefile b/net/php74-xmlrpc/Makefile index 2c8c17a3ac0..abca2a643d6 100644 --- a/net/php74-xmlrpc/Makefile +++ b/net/php74-xmlrpc/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= net MASTERDIR= ${.CURDIR}/../../lang/php74 diff --git a/net/php80-soap/Makefile b/net/php80-soap/Makefile index 9188b01d876..0298f4ff707 100644 --- a/net/php80-soap/Makefile +++ b/net/php80-soap/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= net MASTERDIR= ${.CURDIR}/../../lang/php80 diff --git a/net/php81-soap/Makefile b/net/php81-soap/Makefile index fc40ac2c512..edfa1b90068 100644 --- a/net/php81-soap/Makefile +++ b/net/php81-soap/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= net MASTERDIR= ${.CURDIR}/../../lang/php81 diff --git a/net/py-amqplib/files/patch-2to3 
b/net/py-amqplib/files/patch-2to3 new file mode 100644 index 00000000000..7b54af46f99 --- /dev/null +++ b/net/py-amqplib/files/patch-2to3 @@ -0,0 +1,69 @@ +--- amqplib/client_0_8/method_framing.py.orig 2011-03-29 17:09:17 UTC ++++ amqplib/client_0_8/method_framing.py +@@ -18,7 +18,7 @@ Convert between frames and higher-level AMQP methods + # License along with this library; if not, write to the Free Software + # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 + +-from Queue import Empty, Queue ++from queue import Empty, Queue + from struct import pack, unpack + + try: +@@ -49,9 +49,9 @@ except: + return result + + +-from basic_message import Message +-from exceptions import * +-from serialization import AMQPReader ++from .basic_message import Message ++from .exceptions import * ++from .serialization import AMQPReader + + __all__ = [ + 'MethodReader', +@@ -131,7 +131,7 @@ class MethodReader(object): + while self.queue.empty(): + try: + frame_type, channel, payload = self.source.read_frame() +- except Exception, e: ++ except Exception as e: + # + # Connection was closed? Framing Error? 
+ # +@@ -241,7 +241,7 @@ class MethodWriter(object): + # problem with the content properties, before sending the + # first frame + body = content.body +- if isinstance(body, unicode): ++ if isinstance(body, str): + coding = content.properties.get('content_encoding', None) + if coding is None: + coding = content.properties['content_encoding'] = 'UTF-8' +@@ -257,5 +257,5 @@ class MethodWriter(object): + self.dest.write_frame(2, channel, payload) + + chunk_size = self.frame_max - 8 +- for i in xrange(0, len(body), chunk_size): ++ for i in range(0, len(body), chunk_size): + self.dest.write_frame(3, channel, body[i:i+chunk_size]) +--- amqplib/client_0_8/transport.py.orig 2011-09-28 22:10:35 UTC ++++ amqplib/client_0_8/transport.py +@@ -74,7 +74,7 @@ class _AbstractTransport(object): + self.sock = socket.socket(af, socktype, proto) + self.sock.settimeout(connect_timeout) + self.sock.connect(sa) +- except socket.error, msg: ++ except socket.error as msg: + self.sock.close() + self.sock = None + continue +@@ -82,7 +82,7 @@ class _AbstractTransport(object): + + if not self.sock: + # Didn't connect, return the most recent error message +- raise socket.error, msg ++ raise socket.error(msg) + + self.sock.settimeout(None) + self.sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1) diff --git a/net/py-errbot/Makefile b/net/py-errbot/Makefile index ef6775f8001..664ff28a462 100644 --- a/net/py-errbot/Makefile +++ b/net/py-errbot/Makefile @@ -30,4 +30,17 @@ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +OPTIONS_DEFINE= IRC SLACK_RTM TELEGRAM XMPP +IRC_DESC= IRC messaging support +SLACK-RTM_DESC= Slack messaging support +TELEGRAM_DESC= Telegram messaging support +XMPP_DESC= XMPP messaging support + +IRC_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}irc>=0:irc/py-irc@${PY_FLAVOR} +SLACK-RTM_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}slackclient>=2.0:net-im/py-slackclient@${PY_FLAVOR} +TELEGRAM_RUN_DEPENDS= 
${PYTHON_PKGNAMEPREFIX}python-telegram-bot>=0:net-im/py-python-telegram-bot@${PY_FLAVOR} +XMPP_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pyasn1>=0:devel/py-pyasn1@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pyasn1-modules>=0:devel/py-pyasn1-modules@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}slixmpp>=0:net-im/py-slixmpp@${PY_FLAVOR} + .include diff --git a/net/py-gdown/Makefile b/net/py-gdown/Makefile index 410d8d8f962..2bc181e62c7 100644 --- a/net/py-gdown/Makefile +++ b/net/py-gdown/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= gdown -PORTVERSION= 4.3.1 +PORTVERSION= 4.4.0 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-gdown/distinfo b/net/py-gdown/distinfo index d8fc7ffe21f..d6291cbfce9 100644 --- a/net/py-gdown/distinfo +++ b/net/py-gdown/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058072 -SHA256 (gdown-4.3.1.tar.gz) = 645cb5ff7648f99bbf7d96ce86cc0da1194a1425125dc3cefdb25eb52922c871 -SIZE (gdown-4.3.1.tar.gz) = 13846 +TIMESTAMP = 1647264612 +SHA256 (gdown-4.4.0.tar.gz) = 18fc3a4da4a2273deb7aa29c7486be4df3919d904158ad6a6a3e25c8115470d7 +SIZE (gdown-4.4.0.tar.gz) = 14204 diff --git a/net/py-matrix-synapse-ldap3/Makefile b/net/py-matrix-synapse-ldap3/Makefile index b2d42129a89..5efa5ec6862 100644 --- a/net/py-matrix-synapse-ldap3/Makefile +++ b/net/py-matrix-synapse-ldap3/Makefile @@ -1,7 +1,7 @@ # Created by: Mark Felder PORTNAME= matrix-synapse-ldap3 -PORTVERSION= 0.1.5 +PORTVERSION= 0.2.0 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,11 +13,15 @@ LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}ldap3>=2.8:net/py-ldap3@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}service_identity>=0:security/py-service_identity@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}twisted>=15.1.0:devel/py-twisted@${PY_FLAVOR} -USES= python:3.6+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +post-patch: + @${CP} 
${FILESDIR}/setup.py ${WRKSRC}/ + .include diff --git a/net/py-matrix-synapse-ldap3/distinfo b/net/py-matrix-synapse-ldap3/distinfo index b9cc3ac07ad..67d21fbc1b3 100644 --- a/net/py-matrix-synapse-ldap3/distinfo +++ b/net/py-matrix-synapse-ldap3/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1624189741 -SHA256 (matrix-synapse-ldap3-0.1.5.tar.gz) = 9fdf8df7c8ec756642aa0fea53b31c0b2f1924f70d7f049a2090b523125456fe -SIZE (matrix-synapse-ldap3-0.1.5.tar.gz) = 19086 +TIMESTAMP = 1647264614 +SHA256 (matrix-synapse-ldap3-0.2.0.tar.gz) = 91a0715b43a41ec3033244174fca20846836da98fda711fb01687f7199eecd2e +SIZE (matrix-synapse-ldap3-0.2.0.tar.gz) = 20185 diff --git a/net/py-matrix-synapse-ldap3/files/setup.py b/net/py-matrix-synapse-ldap3/files/setup.py new file mode 100644 index 00000000000..606849326a4 --- /dev/null +++ b/net/py-matrix-synapse-ldap3/files/setup.py @@ -0,0 +1,3 @@ +from setuptools import setup + +setup() diff --git a/net/py-pyroute2.core/Makefile b/net/py-pyroute2.core/Makefile index 8016c0dc283..d8dd5274c52 100644 --- a/net/py-pyroute2.core/Makefile +++ b/net/py-pyroute2.core/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2.core -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2.core/distinfo b/net/py-pyroute2.core/distinfo index b9e28a2b893..4d5b4067ef5 100644 --- a/net/py-pyroute2.core/distinfo +++ b/net/py-pyroute2.core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058076 -SHA256 (pyroute2.core-0.6.7.tar.gz) = 37be1c41c5b1e7b95820f8904cbfbf11ce4a7f5ae074cad192460877268b8cde -SIZE (pyroute2.core-0.6.7.tar.gz) = 211378 +TIMESTAMP = 1647264618 +SHA256 (pyroute2.core-0.6.8.tar.gz) = 824bebd45805588d5ebab8dbb94621623d03e1e653201b033115d25ab2a208fe +SIZE (pyroute2.core-0.6.8.tar.gz) = 212377 diff --git a/net/py-pyroute2.ethtool/Makefile b/net/py-pyroute2.ethtool/Makefile index 796443d8429..e35d9d24095 100644 --- a/net/py-pyroute2.ethtool/Makefile 
+++ b/net/py-pyroute2.ethtool/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2.ethtool -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2.ethtool/distinfo b/net/py-pyroute2.ethtool/distinfo index e3f70399d5d..4cc2b105786 100644 --- a/net/py-pyroute2.ethtool/distinfo +++ b/net/py-pyroute2.ethtool/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058078 -SHA256 (pyroute2.ethtool-0.6.7.tar.gz) = 8f03e0ef1726414dc5e1c56bb3619553105757ac1a324ae79b26641e8da42d1d -SIZE (pyroute2.ethtool-0.6.7.tar.gz) = 28690 +TIMESTAMP = 1647264620 +SHA256 (pyroute2.ethtool-0.6.8.tar.gz) = fbf07e6103a5bb3c866269f0019a27b238197a264f625f0b849f4e8e7bd5ffae +SIZE (pyroute2.ethtool-0.6.8.tar.gz) = 28811 diff --git a/net/py-pyroute2.ipset/Makefile b/net/py-pyroute2.ipset/Makefile index 6ed890021ed..81acfc9afda 100644 --- a/net/py-pyroute2.ipset/Makefile +++ b/net/py-pyroute2.ipset/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2.ipset -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2.ipset/distinfo b/net/py-pyroute2.ipset/distinfo index 2c198a11258..5702fc500dc 100644 --- a/net/py-pyroute2.ipset/distinfo +++ b/net/py-pyroute2.ipset/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058080 -SHA256 (pyroute2.ipset-0.6.7.tar.gz) = ae834b3579a75b6edaf93831b4c8bc1cee611786a0c5d8d0d0332c616e8ee387 -SIZE (pyroute2.ipset-0.6.7.tar.gz) = 31448 +TIMESTAMP = 1647264622 +SHA256 (pyroute2.ipset-0.6.8.tar.gz) = 8f6bc44327eeb8c1f4411b57c3a69c5fd00cbab790c20792f80a7b9b6b8ff4e6 +SIZE (pyroute2.ipset-0.6.8.tar.gz) = 31560 diff --git a/net/py-pyroute2.minimal/Makefile b/net/py-pyroute2.minimal/Makefile index 017003d1d43..f921b58931f 100644 --- a/net/py-pyroute2.minimal/Makefile +++ b/net/py-pyroute2.minimal/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= 
pyroute2.minimal -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2.minimal/distinfo b/net/py-pyroute2.minimal/distinfo index 846fc93eaaa..af3a13fa47e 100644 --- a/net/py-pyroute2.minimal/distinfo +++ b/net/py-pyroute2.minimal/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058082 -SHA256 (pyroute2.minimal-0.6.7.tar.gz) = 3db0b4bcaf367b422fa1df047a4fe092fbf1584c3c4878f570e529e0a965baa2 -SIZE (pyroute2.minimal-0.6.7.tar.gz) = 21125 +TIMESTAMP = 1647264624 +SHA256 (pyroute2.minimal-0.6.8.tar.gz) = 7d2845c30b80899db4f07985c7b66e54978a69d4725cc39c16cafc54d219247c +SIZE (pyroute2.minimal-0.6.8.tar.gz) = 21249 diff --git a/net/py-pyroute2.ndb/Makefile b/net/py-pyroute2.ndb/Makefile index 89b707e90e0..9c9be228e6e 100644 --- a/net/py-pyroute2.ndb/Makefile +++ b/net/py-pyroute2.ndb/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2.ndb -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2.ndb/distinfo b/net/py-pyroute2.ndb/distinfo index 3fe19f2a389..38425bb3f50 100644 --- a/net/py-pyroute2.ndb/distinfo +++ b/net/py-pyroute2.ndb/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058084 -SHA256 (pyroute2.ndb-0.6.7.tar.gz) = a3fae66cec1f4543f0fe0d4273fe68b6a2af19edc1af7cb88925da77f3df4cfc -SIZE (pyroute2.ndb-0.6.7.tar.gz) = 80358 +TIMESTAMP = 1647264626 +SHA256 (pyroute2.ndb-0.6.8.tar.gz) = f740c06d70a4a081546d6668b4a6240921ac42f6fb2eb1297fac460af88fe34c +SIZE (pyroute2.ndb-0.6.8.tar.gz) = 81336 diff --git a/net/py-pyroute2.nftables/Makefile b/net/py-pyroute2.nftables/Makefile index 0a793d8cc2e..2e38032e49d 100644 --- a/net/py-pyroute2.nftables/Makefile +++ b/net/py-pyroute2.nftables/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2.nftables -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= 
${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2.nftables/distinfo b/net/py-pyroute2.nftables/distinfo index 81306a2b0fe..bd5f08fe7e5 100644 --- a/net/py-pyroute2.nftables/distinfo +++ b/net/py-pyroute2.nftables/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058086 -SHA256 (pyroute2.nftables-0.6.7.tar.gz) = 77d2dd5f105019ae343261ae2b670538e108ed7e18d693efd0e6d6f67ed9523a -SIZE (pyroute2.nftables-0.6.7.tar.gz) = 26328 +TIMESTAMP = 1647264628 +SHA256 (pyroute2.nftables-0.6.8.tar.gz) = 48d79bc6cd26085108e1b7a3b8b31e537c2b3bc299653d5fae77d0c3c1a4a3a1 +SIZE (pyroute2.nftables-0.6.8.tar.gz) = 26428 diff --git a/net/py-pyroute2.nslink/Makefile b/net/py-pyroute2.nslink/Makefile index 69f1265d8fa..567dc2aaa8a 100644 --- a/net/py-pyroute2.nslink/Makefile +++ b/net/py-pyroute2.nslink/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2.nslink -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2.nslink/distinfo b/net/py-pyroute2.nslink/distinfo index 71930ff6157..fa410ce4a6c 100644 --- a/net/py-pyroute2.nslink/distinfo +++ b/net/py-pyroute2.nslink/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058088 -SHA256 (pyroute2.nslink-0.6.7.tar.gz) = a7e537639bca0b1baf325ff234a95acb9eed954e062adb4226b03072d29ae536 -SIZE (pyroute2.nslink-0.6.7.tar.gz) = 29943 +TIMESTAMP = 1647264630 +SHA256 (pyroute2.nslink-0.6.8.tar.gz) = 6d31b35e8b84755505693e66c467e89c5bb768e33164a4530cc68715c8a87de7 +SIZE (pyroute2.nslink-0.6.8.tar.gz) = 30062 diff --git a/net/py-pyroute2.protocols/Makefile b/net/py-pyroute2.protocols/Makefile index 16632098b04..fbf16a57b57 100644 --- a/net/py-pyroute2.protocols/Makefile +++ b/net/py-pyroute2.protocols/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2.protocols -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git 
a/net/py-pyroute2.protocols/distinfo b/net/py-pyroute2.protocols/distinfo index 006057e0663..20d7ce37987 100644 --- a/net/py-pyroute2.protocols/distinfo +++ b/net/py-pyroute2.protocols/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058090 -SHA256 (pyroute2.protocols-0.6.7.tar.gz) = 19daaeaae6d0714f9ec73c627dbf1419a9a449432147de23f9ab85f08e9eeff4 -SIZE (pyroute2.protocols-0.6.7.tar.gz) = 27090 +TIMESTAMP = 1647264632 +SHA256 (pyroute2.protocols-0.6.8.tar.gz) = 974ec21306730f1ca6f90027285144a57ebdd114ad380fab03031788477b14eb +SIZE (pyroute2.protocols-0.6.8.tar.gz) = 27177 diff --git a/net/py-pyroute2/Makefile b/net/py-pyroute2/Makefile index 13e1ef14a05..7231a04982c 100644 --- a/net/py-pyroute2/Makefile +++ b/net/py-pyroute2/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pyroute2 -PORTVERSION= 0.6.7 +PORTVERSION= 0.6.8 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-pyroute2/distinfo b/net/py-pyroute2/distinfo index bd8ab8e3528..20c1bbe5d95 100644 --- a/net/py-pyroute2/distinfo +++ b/net/py-pyroute2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058074 -SHA256 (pyroute2-0.6.7.tar.gz) = a81bfd8ec8728344bd21385dad71d315c6f69de40f0e65119ad49ba9808a956a -SIZE (pyroute2-0.6.7.tar.gz) = 23537 +TIMESTAMP = 1647264616 +SHA256 (pyroute2-0.6.8.tar.gz) = 5a1aa3bc2c0aad45012cabe3a6f6bba5cf7850866f4dfee3ef4e41b760c7c8a4 +SIZE (pyroute2-0.6.8.tar.gz) = 23666 diff --git a/net/py-pyroute2/files/patch-setup.cfg b/net/py-pyroute2/files/patch-setup.cfg index 06807bb7bdf..9409ad71796 100644 --- a/net/py-pyroute2/files/patch-setup.cfg +++ b/net/py-pyroute2/files/patch-setup.cfg @@ -3,10 +3,10 @@ Reference: https://github.com/svinota/pyroute2/discussions/788 --- setup.cfg.orig 2022-02-21 07:41:28 UTC +++ setup.cfg @@ -33,7 +33,6 @@ install_requires = - pyroute2.nftables==0.6.7 - pyroute2.ethtool==0.6.7 - pyroute2.ipset==0.6.7 -- pyroute2.ipdb==0.6.7 - pyroute2.ndb==0.6.7 + pyroute2.nftables==0.6.8 + pyroute2.ethtool==0.6.8 
+ pyroute2.ipset==0.6.8 +- pyroute2.ipdb==0.6.8 + pyroute2.ndb==0.6.8 packages_dir = =pyroute2 diff --git a/net/py-python-barbicanclient/Makefile b/net/py-python-barbicanclient/Makefile index bf285e84200..2befed9ba6d 100644 --- a/net/py-python-barbicanclient/Makefile +++ b/net/py-python-barbicanclient/Makefile @@ -1,7 +1,7 @@ # Created by: Alexander Nusov PORTNAME= python-barbicanclient -PORTVERSION= 5.2.0 +PORTVERSION= 5.3.0 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-python-barbicanclient/distinfo b/net/py-python-barbicanclient/distinfo index c6063c72c11..124cf205535 100644 --- a/net/py-python-barbicanclient/distinfo +++ b/net/py-python-barbicanclient/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632037196 -SHA256 (python-barbicanclient-5.2.0.tar.gz) = 9e69572aa11700c41fc126b26de5a7f79d3f0638bd81a61676597cd0e7cee702 -SIZE (python-barbicanclient-5.2.0.tar.gz) = 119411 +TIMESTAMP = 1647264634 +SHA256 (python-barbicanclient-5.3.0.tar.gz) = 7c5b0faede5c308389fc1970d90da1d3c32827b56742bca68e357106de0e8b98 +SIZE (python-barbicanclient-5.3.0.tar.gz) = 119436 diff --git a/net/py-python-cinderclient/Makefile b/net/py-python-cinderclient/Makefile index 3a833195b40..258a1ea7c57 100644 --- a/net/py-python-cinderclient/Makefile +++ b/net/py-python-cinderclient/Makefile @@ -1,7 +1,7 @@ # Created by: Alexander Nusov PORTNAME= python-cinderclient -PORTVERSION= 8.2.0 +PORTVERSION= 8.3.0 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-python-cinderclient/distinfo b/net/py-python-cinderclient/distinfo index c2f9821d60b..609bd586423 100644 --- a/net/py-python-cinderclient/distinfo +++ b/net/py-python-cinderclient/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643577012 -SHA256 (python-cinderclient-8.2.0.tar.gz) = 7b2f08a2d1cc05d2c1f84f02fadb2208678b1acb501acfe2de33720078ec7b9f -SIZE (python-cinderclient-8.2.0.tar.gz) = 233148 +TIMESTAMP = 1647264636 +SHA256 
(python-cinderclient-8.3.0.tar.gz) = e00103875029dc85cbb59131d00ccc8534f692956acde32b5a3cc5af4c24580b +SIZE (python-cinderclient-8.3.0.tar.gz) = 234371 diff --git a/net/py-python-heatclient/Makefile b/net/py-python-heatclient/Makefile index 3d49eb4e734..2b92c099b78 100644 --- a/net/py-python-heatclient/Makefile +++ b/net/py-python-heatclient/Makefile @@ -1,7 +1,7 @@ # Created by: Alexander Nusov PORTNAME= python-heatclient -PORTVERSION= 2.5.0 +PORTVERSION= 2.5.1 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-python-heatclient/distinfo b/net/py-python-heatclient/distinfo index cf8973862b9..2e6333ea3e0 100644 --- a/net/py-python-heatclient/distinfo +++ b/net/py-python-heatclient/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642102329 -SHA256 (python-heatclient-2.5.0.tar.gz) = b610748eb3f18f6bd762e0808accdf872308289a77c3b19ed2d8b9f306393a42 -SIZE (python-heatclient-2.5.0.tar.gz) = 178889 +TIMESTAMP = 1647264638 +SHA256 (python-heatclient-2.5.1.tar.gz) = de5ed7cb12a6d7c0403350e136c0a6470719476db8fbc9bf8d0d581ebc0b1c2b +SIZE (python-heatclient-2.5.1.tar.gz) = 178919 diff --git a/net/py-python-novaclient/Makefile b/net/py-python-novaclient/Makefile index da647a81d38..ef5b99b26d9 100644 --- a/net/py-python-novaclient/Makefile +++ b/net/py-python-novaclient/Makefile @@ -1,7 +1,7 @@ # Created by: Roman Bogorodskiy PORTNAME= python-novaclient -PORTVERSION= 17.6.0 +PORTVERSION= 17.7.0 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-python-novaclient/distinfo b/net/py-python-novaclient/distinfo index c12f7d83d36..7cdb24f7e54 100644 --- a/net/py-python-novaclient/distinfo +++ b/net/py-python-novaclient/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641136641 -SHA256 (python-novaclient-17.6.0.tar.gz) = c910c2085310da635fb343585f1070712ff0f9cb3c8f79d44ca3d632c4f230f5 -SIZE (python-novaclient-17.6.0.tar.gz) = 335297 +TIMESTAMP = 1647264640 +SHA256 
(python-novaclient-17.7.0.tar.gz) = 4ebc27f4ce06c155b8e991a44463b9358596e6856cf171c9af8dc7568d868bed +SIZE (python-novaclient-17.7.0.tar.gz) = 335462 diff --git a/net/py-python-openstackclient/Makefile b/net/py-python-openstackclient/Makefile index 955d55d0d8d..d6572cce854 100644 --- a/net/py-python-openstackclient/Makefile +++ b/net/py-python-openstackclient/Makefile @@ -1,7 +1,7 @@ # Created by: Alexander Nusov PORTNAME= python-openstackclient -PORTVERSION= 5.7.0 +PORTVERSION= 5.8.0 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -16,7 +16,7 @@ BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pbr>=2.0.0:devel/py-pbr@${PY_FLAVOR} RUN_DEPENDS= \ ${PYTHON_PKGNAMEPREFIX}cliff>=3.5.0:devel/py-cliff@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}iso8601>=0.1.11:devel/py-iso8601@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}openstacksdk>=0.56.0:devel/py-openstacksdk@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}openstacksdk>=0.61.0:devel/py-openstacksdk@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}osc-lib>=2.3.0:devel/py-osc-lib@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}oslo.i18n>=3.15.3:devel/py-oslo.i18n@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}oslo.utils>=3.33.0:devel/py-oslo.utils@${PY_FLAVOR} \ diff --git a/net/py-python-openstackclient/distinfo b/net/py-python-openstackclient/distinfo index c5869fe6ba4..8b5a532bb96 100644 --- a/net/py-python-openstackclient/distinfo +++ b/net/py-python-openstackclient/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1644001169 -SHA256 (python-openstackclient-5.7.0.tar.gz) = c65e3d51018f193cce2daf3d0fd69daa36003bdb2b85df6b07b973e4c39e2f92 -SIZE (python-openstackclient-5.7.0.tar.gz) = 817269 +TIMESTAMP = 1647264642 +SHA256 (python-openstackclient-5.8.0.tar.gz) = 334852df8897b95f0581ec12ee287de8c7a9289a208a18f0a8b38777019fd986 +SIZE (python-openstackclient-5.8.0.tar.gz) = 827602 diff --git a/net/py-python-socks/Makefile b/net/py-python-socks/Makefile index a9af04fdf2b..6cb62ce963f 100644 --- a/net/py-python-socks/Makefile +++ 
b/net/py-python-socks/Makefile @@ -2,6 +2,7 @@ PORTNAME= python-socks PORTVERSION= 2.0.3 +PORTREVISION= 1 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -14,7 +15,6 @@ LICENSE_FILE= ${WRKSRC}/LICENSE.txt RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}anyio>=3.3.4:devel/py-anyio@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}async_timeout>=3.0.1:devel/py-async_timeout@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}asyncio>=3.3.4:devel/py-asyncio@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}curio>=1.4:devel/py-curio@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}trio>=0.16.0:net/py-trio@${PY_FLAVOR} diff --git a/net/py-qt5-networkauth/Makefile b/net/py-qt5-networkauth/Makefile index 987881f74f7..417b283b834 100644 --- a/net/py-qt5-networkauth/Makefile +++ b/net/py-qt5-networkauth/Makefile @@ -1,5 +1,6 @@ PORTNAME= networkauth PORTVERSION= ${PYQTNETWORKAUTH_VERSION} +PORTREVISION= 1 CATEGORIES= net devel python MASTER_SITES= ${MASTER_SITES_PYQTNETWORKAUTH} PKGNAMEPREFIX= ${PYQT_PY_RELNAME}- diff --git a/net/py-softlayer/Makefile b/net/py-softlayer/Makefile index 3fd78a35ea7..fb7748116b3 100644 --- a/net/py-softlayer/Makefile +++ b/net/py-softlayer/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= softlayer -PORTVERSION= 5.9.9 +PORTVERSION= 6.0.1 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -14,7 +14,7 @@ LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}click>=7:devel/py-click@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}ptable>=0.9.2:devel/py-ptable@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}prettytable>=2.0.0:devel/py-prettytable@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}prompt-toolkit>=2:devel/py-prompt-toolkit@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pygments>=2.0.0:textproc/py-pygments@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}requests>=2.20.0:www/py-requests@${PY_FLAVOR} \ diff --git a/net/py-softlayer/distinfo b/net/py-softlayer/distinfo index 03dabf4ab12..117e59b7799 100644 --- 
a/net/py-softlayer/distinfo +++ b/net/py-softlayer/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058100 -SHA256 (SoftLayer-5.9.9.tar.gz) = 1403f0927d84131ad739fed1cbd216abc7e5cd489c266a5a3581251e360cbb6f -SIZE (SoftLayer-5.9.9.tar.gz) = 439072 +TIMESTAMP = 1647264644 +SHA256 (SoftLayer-6.0.1.tar.gz) = 0bad1b5b559d8d3f7499ef0e027571a5531795d509cd20a40217ae8d32d1ee98 +SIZE (SoftLayer-6.0.1.tar.gz) = 442762 diff --git a/net/py-stomp.py/files/patch-2to3 b/net/py-stomp.py/files/patch-2to3 new file mode 100644 index 00000000000..795a89f846b --- /dev/null +++ b/net/py-stomp.py/files/patch-2to3 @@ -0,0 +1,13 @@ +--- stomp/backwardsock25.py.orig 2016-11-13 14:00:03 UTC ++++ stomp/backwardsock25.py +@@ -25,8 +25,8 @@ def get_socket(host, port, timeout=None): + sock.connect(sa) + return sock + +- except error, msg: ++ except error as msg: + if sock is not None: + sock.close() + +- raise error, ERRMSG ++ raise error(ERRMSG) diff --git a/net/py-suds-jurko/Makefile b/net/py-suds-jurko/Makefile index 5178c657a08..de6ac3a3463 100644 --- a/net/py-suds-jurko/Makefile +++ b/net/py-suds-jurko/Makefile @@ -2,7 +2,7 @@ PORTNAME= suds-jurko PORTVERSION= 0.6 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= net python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/net/py-suds-jurko/files/patch-setup.py b/net/py-suds-jurko/files/patch-setup.py index bc6ef4ccb23..f92552ec243 100644 --- a/net/py-suds-jurko/files/patch-setup.py +++ b/net/py-suds-jurko/files/patch-setup.py @@ -1,4 +1,4 @@ ---- setup.py.orig 2021-09-20 17:08:56 UTC +--- setup.py.orig 2022-03-24 18:36:13 UTC +++ setup.py @@ -91,17 +91,6 @@ if sys.version_info >= (2, 5): # distutils.setup() 'obsoletes' parameter not introduced until Python 2.5. @@ -18,3 +18,12 @@ # Wrap long_description at 72 characters since PKG-INFO package distribution # metadata file stores this text with an 8 space indentation. 
long_description = """ +@@ -167,7 +156,7 @@ setup( + keywords=["SOAP", "web", "service", "client"], + url=project_url, + download_url=download_url, +- packages=find_packages(), ++ packages=find_packages(exclude=['tests*']), + + # 'maintainer' will be listed as the distribution package author. + # Warning: Due to a 'distribute' package defect when used with Python 3 diff --git a/net/py-tofu/files/patch-2to3 b/net/py-tofu/files/patch-2to3 new file mode 100644 index 00000000000..5aa769e231d --- /dev/null +++ b/net/py-tofu/files/patch-2to3 @@ -0,0 +1,300 @@ +--- __init__.py.orig 2006-05-25 20:37:11 UTC ++++ __init__.py +@@ -208,7 +208,7 @@ You should provide your own Idler (Soya 3D includes a + global IDLER + IDLER = self + +- print "* Tofu * IDLER created !" ++ print("* Tofu * IDLER created !") + + def stop(self, *return_values): + """Idler.stop() +@@ -290,7 +290,7 @@ This static method returns the object of the given UID + """Unique.hasuid(uid) -> bool + + This static method returns true if an object with the given UID exists.""" +- return Unique._alls.has_key(uid) ++ return uid in Unique._alls + hasuid = staticmethod(hasuid) + + def loaded(self): +@@ -395,7 +395,7 @@ Delete a SavedInAPath's file.""" + Unique.discard(self) + + filename = filename or os.path.join(path[0], self.DIRNAME, self.filename.replace("/", os.sep)) + ".data" +- print "* Tofu * Deleting %s %s (file %s) !" % (self.__class__.__name__, self.filename, filename) ++ print("* Tofu * Deleting %s %s (file %s) !" 
% (self.__class__.__name__, self.filename, filename)) + os.remove(filename) + + def get_filename(self): return self._filename +@@ -417,13 +417,13 @@ all Players).""" + for p in path: + for filename in dircache.listdir(os.path.join(p, klass.DIRNAME)): + if filename.endswith(".data"): filenames[filename[:-5]] = 1 +- filenames = filenames.keys() ++ filenames = list(filenames.keys()) + filenames.sort() + return filenames + availables = classmethod(availables) + + def discard(self): +- print "* Tofu * Discard %s %s %s..." % (self.__class__.__name__.lower(), self.filename, self.uid) ++ print("* Tofu * Discard %s %s %s..." % (self.__class__.__name__.lower(), self.filename, self.uid)) + del self._alls2[self.filename] + Unique.discard(self) + +@@ -487,7 +487,7 @@ store some data on the client side. Default implementa + + SavedInAPath.__init__(self) + +- print "* Tofu * Creating new player %s..." % filename ++ print("* Tofu * Creating new player %s..." % filename) + + self.filename = filename + self.password = password +@@ -516,7 +516,7 @@ Login the Player. + CLIENT_SIDE_DATA contains additional data given by the client; you can use them to + store some data on the client side. Default implementation ignore them.""" + if self.address: raise ValueError("Player %s is already logged !" % self.filename) +- print "* Tofu * Player %s login !" % self.filename ++ print("* Tofu * Player %s login !" % self.filename) + + for mobile in self.mobiles: + if not mobile in mobile.level.mobiles: mobile.level.add_mobile(mobile) +@@ -535,7 +535,7 @@ store some data on the client side. Default implementa + + Logout the Player.""" + if self.address: +- print "* Tofu * Player %s logout !" % self.filename ++ print("* Tofu * Player %s logout !" % self.filename) + try: self.notifier.transport.loseConnection() + except: pass + self.notifier = None +@@ -617,8 +617,8 @@ You should not call this method directly; the level ac + determined by the Mobile of the Level. 
+ You may want to override this method, e.g. to display an active level.""" + if active != self.active: +- if active: print "* Tofu * Level %s %s activated !" % (self.filename, self.uid) +- else: print "* Tofu * Level %s %s inactivated." % (self.filename, self.uid) ++ if active: print("* Tofu * Level %s %s activated !" % (self.filename, self.uid)) ++ else: print("* Tofu * Level %s %s inactivated." % (self.filename, self.uid)) + if self.active: IDLER.remove_level(self) + self.active = active + if self.active: IDLER.add_level(self) +@@ -658,7 +658,7 @@ You may override this method; the default implementati + + This class method discards ALL non-active levels. + It is called by the Idler each begin_round.""" +- for level in clazz._alls2.values(): ++ for level in list(clazz._alls2.values()): + if not level.active: level.discard() + discard_inactives = classmethod(discard_inactives) + +@@ -977,7 +977,7 @@ IF YOU USE CPICKLE OVER NETWORK, YOU ASSUME YOU TRUST + E.g. to use cPickle for local file only, do: + enable_pickle(1, 0) + """ +- import cPickle as pickle ++ import pickle as pickle + global local_serializer, network_serializer + if local : local_serializer = pickle + if network: network_serializer = pickle +--- client.py.orig 2006-05-21 15:57:53 UTC ++++ client.py +@@ -36,7 +36,7 @@ import tofu + try: set + except: from sets import Set as set + +-class ClientServerError(StandardError): pass ++class ClientServerError(Exception): pass + + + class UDP(DatagramProtocol): +@@ -44,8 +44,9 @@ class UDP(DatagramProtocol): + a = "hello" + self.transport.write(a, ("127.0.0.1", 9999)) + +- def datagramReceived(self, data, (host, port)): +- print "UDP received %r from %s:%d" % (data, host, port) ++ def datagramReceived(self, data, xxx_todo_changeme): ++ (host, port) = xxx_todo_changeme ++ print("UDP received %r from %s:%d" % (data, host, port)) + + PLANNED_ARRIVAL_UIDS = set() + +@@ -117,7 +118,7 @@ class Notifier(NetstringReceiver, tofu.Notifier): + + elif code == 
tofu.CODE_OWN_CONTROL: + uid = struct.unpack("!i", data[1:])[0] +- print "* Tofu * Owning mobile %s..." % uid ++ print("* Tofu * Owning mobile %s..." % uid) + #def own_control(mobile): + # print "own_control", mobile.level + # tofu.IDLER.next_round_tasks.append(mobile.control_owned) +@@ -130,7 +131,7 @@ class Notifier(NetstringReceiver, tofu.Notifier): + + elif code == tofu.CODE_REMOVE_MOBILE: + uid = struct.unpack("!i", data[1:])[0] +- print "* Tofu * Removing mobile %s..." % uid ++ print("* Tofu * Removing mobile %s..." % uid) + def remove_mobile(mobile): + mobile.level.remove_mobile(mobile) + mobile.discard() +@@ -147,7 +148,7 @@ class Notifier(NetstringReceiver, tofu.Notifier): + elif code == tofu.CODE_ADD_MOBILE: + mobile_uid = struct.unpack("!i", data[1:5])[0] + level_uid = struct.unpack("!i", data[5:9])[0] +- print "* Tofu * Adding mobile %s in level %s..." % (mobile_uid, level_uid) ++ print("* Tofu * Adding mobile %s in level %s..." % (mobile_uid, level_uid)) + def add_mobile(*args): + mobile = tofu.Unique.getbyuid(mobile_uid) + level = tofu.Unique.getbyuid(level_uid ) +@@ -158,15 +159,15 @@ class Notifier(NetstringReceiver, tofu.Notifier): + waiter.start() + + elif code == tofu.CODE_DATA_UNIQUE: +- print "* Tofu * Receiving unique..." ++ print("* Tofu * Receiving unique...") + unique = tofu.Unique.undump(data[1:]) + unique.received() +- assert (not hasattr(unique, "level")) or (not unique.level) or (unique.level in unique.level._alls2.values()), "Level sent with non-level unique !" ++ assert (not hasattr(unique, "level")) or (not unique.level) or (unique.level in list(unique.level._alls2.values())), "Level sent with non-level unique !" + self.arrived(unique) + + elif code == tofu.CODE_ENTER_LEVEL: + uid = struct.unpack("!i", data[1:])[0] +- print "* Tofu * Entering level %s..." % uid ++ print("* Tofu * Entering level %s..." 
% uid) + #self.uids_arrival_planned.add(uid) # The server will send it + # Previous level is outdated => drop it + if tofu.Unique.hasuid(uid): tofu.Unique.getbyuid(uid).set_active(0) +@@ -175,12 +176,12 @@ class Notifier(NetstringReceiver, tofu.Notifier): + waiter.start() + + elif code == tofu.CODE_ERROR: +- print "* Tofu * Server error: %s" % data[1:] ++ print("* Tofu * Server error: %s" % data[1:]) + #self.errors.append(data[1:]) + raise ClientServerError(data[1:]) + + def arrived(self, unique): +- print "* Tofu * Received unique %s %s." % (unique.uid, unique) ++ print("* Tofu * Received unique %s %s." % (unique.uid, unique)) + + waiters = WAITERS.get(unique.uid) + if waiters: +@@ -192,7 +193,7 @@ class Notifier(NetstringReceiver, tofu.Notifier): + self.uids_arrival_planned.discard(unique.uid) + + def ask_unique(self, uid): +- print "* Tofu * Ask for UID %s..." % uid ++ print("* Tofu * Ask for UID %s..." % uid) + self.uids_arrival_planned.add(uid) + self.sendString(tofu.CODE_ASK_UNIQUE + struct.pack("!i", uid)) + +@@ -222,12 +223,12 @@ class TCPFactory(ClientFactory): + + def clientConnectionFailed(self, connector, reason): + m = reason.getErrorMessage() +- print "* Tofu * Connection failed:", m ++ print("* Tofu * Connection failed:", m) + tofu.GAME_INTERFACE.network_error(m) + + def clientConnectionLost(self, connector, reason): + m = reason.getErrorMessage() +- print "* Tofu * Connection lost:", m ++ print("* Tofu * Connection lost:", m) + tofu.GAME_INTERFACE.network_error(m) + + +--- server.py.orig 2006-05-21 15:58:53 UTC ++++ server.py +@@ -30,14 +30,15 @@ import sys, struct + import tofu + + class UDP(DatagramProtocol): +- def datagramReceived(self, data, (host, port)): +- print "UDP received %r from %s:%d" % (data, host, port) ++ def datagramReceived(self, data, xxx_todo_changeme): ++ (host, port) = xxx_todo_changeme ++ print("UDP received %r from %s:%d" % (data, host, port)) + self.transport.write(data, (host, port)) + + #reactor.listenUDP(6900, UDP()) + + 
+-class SecurityError(StandardError): ++class SecurityError(Exception): + pass + + +@@ -78,8 +79,8 @@ class PlayerNotifier(NetstringReceiver): + if data[1:] != tofu.VERSION: raise ValueError("Server and client use incompatible version (server: %s, client %s)" % (VERSION, data[1:])) + self.version_checked = 1 + +- except StandardError, e: +- print "* Tofu * Error occured:" ++ except Exception as e: ++ print("* Tofu * Error occured:") + sys.excepthook(*sys.exc_info()) + self.sendString(tofu.CODE_ERROR + "%s: %s" % (e.__class__.__name__, str(e))) + +@@ -97,11 +98,11 @@ class PlayerNotifier(NetstringReceiver): + def connectionLost(self, reason): + Protocol.connectionLost(self, reason) + if self.player: +- print "* Tofu * Connection lost with player %s:" % self.player.filename, reason.getErrorMessage() ++ print("* Tofu * Connection lost with player %s:" % self.player.filename, reason.getErrorMessage()) + self.logout_player() + + def send_unique(self, unique): +- print "* Tofu * Sending unique %s..." % unique.uid ++ print("* Tofu * Sending unique %s..." 
% unique.uid) + self.sendString(tofu.CODE_DATA_UNIQUE + unique.dump()) + + def notify_state(self, mobile, state): +@@ -128,7 +129,7 @@ class Notifier(tofu.Notifier): + pass + + def notify_state(self, mobile, state): +- for player in tofu.YourPlayer._alls2.values(): # XXX optimize this (maintain a list of player for each level) ++ for player in list(tofu.YourPlayer._alls2.values()): # XXX optimize this (maintain a list of player for each level) + if player.notifier: + for m in player.mobiles: + if m.level is mobile.level: +@@ -136,7 +137,7 @@ class Notifier(tofu.Notifier): + break + + def notify_add_mobile(self, mobile): +- for player in tofu.YourPlayer._alls2.values(): # XXX optimize this ++ for player in list(tofu.YourPlayer._alls2.values()): # XXX optimize this + if player.notifier: + for m in player.mobiles: + if (not m is mobile) and (m.level is mobile.level): +@@ -163,7 +164,7 @@ class Notifier(tofu.Notifier): + player.notifier.notify_enter_level(mobile.level) + + def notify_remove_mobile(self, mobile): +- for player in tofu.YourPlayer._alls2.values(): # XXX optimize this ++ for player in list(tofu.YourPlayer._alls2.values()): # XXX optimize this + if player.notifier: + for m in player.mobiles: + if m.level is mobile.level: +@@ -184,7 +185,7 @@ class Notifier(tofu.Notifier): + if isinstance(unique, tofu.SavedInAPath): unique.save() + + def game_ended(self): +- for player in tofu.Player._alls2.values(): ++ for player in list(tofu.Player._alls2.values()): + if player.notifier: + player.notifier.logout_player() + +@@ -199,7 +200,7 @@ Starts a game server on TCP port PORT.""" + f.protocol = PlayerNotifier + reactor.listenTCP(6900, f) + +- print "* Tofu * Server ready !" 
++ print("* Tofu * Server ready !") + try: + tofu.IDLER.idle() + diff --git a/net/rubygem-activestorage52/Makefile b/net/rubygem-activestorage52/Makefile index ccd2bd7e604..7f769126261 100644 --- a/net/rubygem-activestorage52/Makefile +++ b/net/rubygem-activestorage52/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= activestorage -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= net rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/net/rubygem-activestorage52/distinfo b/net/rubygem-activestorage52/distinfo index 6482d597368..1f28ea1bc48 100644 --- a/net/rubygem-activestorage52/distinfo +++ b/net/rubygem-activestorage52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298706 -SHA256 (rubygem/activestorage-5.2.6.gem) = d45ddb05fa5f341482fd1ebacf3d898362d2f300d9f7116e1ac3138d0353df72 -SIZE (rubygem/activestorage-5.2.6.gem) = 44544 +TIMESTAMP = 1647264898 +SHA256 (rubygem/activestorage-5.2.7.gem) = 6449ab9b0ff0842847c45b3426f8ef605638d084c56872b1382dee9a3d3ce94b +SIZE (rubygem/activestorage-5.2.7.gem) = 46592 diff --git a/net/rubygem-activestorage60/Makefile b/net/rubygem-activestorage60/Makefile index 51e0a9ba3dd..c1df2b842bd 100644 --- a/net/rubygem-activestorage60/Makefile +++ b/net/rubygem-activestorage60/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= activestorage -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= net rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/net/rubygem-activestorage60/distinfo b/net/rubygem-activestorage60/distinfo index 6dba732a0f8..863bbd95366 100644 --- a/net/rubygem-activestorage60/distinfo +++ b/net/rubygem-activestorage60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058916 -SHA256 (rubygem/activestorage-6.0.4.6.gem) = bfe40c21ee921c837966c4bc063f2deaa0f13ff241bc708405700bfb94d3b44b -SIZE (rubygem/activestorage-6.0.4.6.gem) = 51200 +TIMESTAMP = 1647264922 +SHA256 (rubygem/activestorage-6.0.4.7.gem) = 4b44d8b8c9f2cc44a1e4b24457848c4595c162246402f6d91dd3575b0b07cc7a +SIZE 
(rubygem/activestorage-6.0.4.7.gem) = 53248 diff --git a/net/rubygem-activestorage61/Makefile b/net/rubygem-activestorage61/Makefile index 67acac46efb..b92e747ef0a 100644 --- a/net/rubygem-activestorage61/Makefile +++ b/net/rubygem-activestorage61/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= activestorage -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= net rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/net/rubygem-activestorage61/distinfo b/net/rubygem-activestorage61/distinfo index a7c9bfbd4a1..860c1a79628 100644 --- a/net/rubygem-activestorage61/distinfo +++ b/net/rubygem-activestorage61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058942 -SHA256 (rubygem/activestorage-6.1.4.6.gem) = 60908c5cab0bd2acb52ffe5b5fcc22ba8e795f749efc0a7df1f0ca524ac0cfce -SIZE (rubygem/activestorage-6.1.4.6.gem) = 55296 +TIMESTAMP = 1647264948 +SHA256 (rubygem/activestorage-6.1.4.7.gem) = 341e918a47c8d8d70a206e7b72f0c56e0a519c0e28c982a38accebe6f364fda1 +SIZE (rubygem/activestorage-6.1.4.7.gem) = 57344 diff --git a/net/rubygem-activestorage70/Makefile b/net/rubygem-activestorage70/Makefile index 698650e7af8..d77742f974f 100644 --- a/net/rubygem-activestorage70/Makefile +++ b/net/rubygem-activestorage70/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= activestorage -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= net rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/net/rubygem-activestorage70/distinfo b/net/rubygem-activestorage70/distinfo index 1dbc0946b75..45bba17f91a 100644 --- a/net/rubygem-activestorage70/distinfo +++ b/net/rubygem-activestorage70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058968 -SHA256 (rubygem/activestorage-7.0.2.gem) = 8b9cde0a53524190eed9d41e0b3e83f89abd0122dea74fb69968c3826c2ac52b -SIZE (rubygem/activestorage-7.0.2.gem) = 62976 +TIMESTAMP = 1647264974 +SHA256 (rubygem/activestorage-7.0.2.3.gem) = db38d33175cb7db0892b7f16a20c51d821ea3e7e1b1f4deaaebb869a30766b90 +SIZE (rubygem/activestorage-7.0.2.3.gem) 
= 65024 diff --git a/net/rubygem-fog-google/Makefile b/net/rubygem-fog-google/Makefile index e761119f432..5d7a948b23a 100644 --- a/net/rubygem-fog-google/Makefile +++ b/net/rubygem-fog-google/Makefile @@ -2,6 +2,7 @@ PORTNAME= fog-google PORTVERSION= 1.18.0 +PORTREVISION= 1 CATEGORIES= net rubygems MASTER_SITES= RG @@ -11,7 +12,7 @@ COMMENT= Module for the 'fog' gem to support Google Cloud LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE.md -RUN_DEPENDS= rubygem-fog-core>=0<2.3:devel/rubygem-fog-core \ +RUN_DEPENDS= rubygem-fog-core210>=0<2.3:devel/rubygem-fog-core210 \ rubygem-fog-json>=1.2<2:devel/rubygem-fog-json \ rubygem-fog-xml>=0.1.0<0.2:textproc/rubygem-fog-xml \ rubygem-google-apis-compute_v1>=0.14<1:devel/rubygem-google-apis-compute_v1 \ diff --git a/net/rubygem-gitaly/Makefile b/net/rubygem-gitaly/Makefile index f7c44e6d29b..58aedfc6af9 100644 --- a/net/rubygem-gitaly/Makefile +++ b/net/rubygem-gitaly/Makefile @@ -1,5 +1,5 @@ PORTNAME= gitaly -PORTVERSION= 14.8.0.pre.rc1 +PORTVERSION= 14.9.0.pre.rc4 CATEGORIES= net rubygems MASTER_SITES= RG @@ -9,7 +9,7 @@ COMMENT= Auto-generated gRPC client for gitaly LICENSE= MIT RUN_DEPENDS= rubygem-grpc142>=1.0<2.0:net/rubygem-grpc142 \ - gitaly>=14.8:devel/gitaly + gitaly>=14.9:devel/gitaly USES= cpe gem USE_RUBY= yes diff --git a/net/rubygem-gitaly/distinfo b/net/rubygem-gitaly/distinfo index ba9b344fe1a..6af98ff5a06 100644 --- a/net/rubygem-gitaly/distinfo +++ b/net/rubygem-gitaly/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645607701 -SHA256 (rubygem/gitaly-14.8.0.pre.rc1.gem) = 8c478253076d0d8bf7c8821501457aca656def50f22e1503634b06ea37068836 -SIZE (rubygem/gitaly-14.8.0.pre.rc1.gem) = 25088 +TIMESTAMP = 1647944299 +SHA256 (rubygem/gitaly-14.9.0.pre.rc4.gem) = 261c42da7cb77b2450c89eb13ea66b6b89fcf084b7bc7695e4c047402c05d88f +SIZE (rubygem/gitaly-14.9.0.pre.rc4.gem) = 26112 diff --git a/net/rubygem-google-cloud-logging-v2/Makefile b/net/rubygem-google-cloud-logging-v2/Makefile index 0ec4f4059f2..e4d633bb71d 100644 --- 
a/net/rubygem-google-cloud-logging-v2/Makefile +++ b/net/rubygem-google-cloud-logging-v2/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-cloud-logging-v2 -PORTVERSION= 0.6.0 +PORTVERSION= 0.7.0 CATEGORIES= net rubygems MASTER_SITES= RG diff --git a/net/rubygem-google-cloud-logging-v2/distinfo b/net/rubygem-google-cloud-logging-v2/distinfo index 09ca3825c16..1d3d0cf8f88 100644 --- a/net/rubygem-google-cloud-logging-v2/distinfo +++ b/net/rubygem-google-cloud-logging-v2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058846 -SHA256 (rubygem/google-cloud-logging-v2-0.6.0.gem) = 3b4fbe8267263694b227d125a1412fe5af4aa66f81fe2ac1648c11fa8c240be9 -SIZE (rubygem/google-cloud-logging-v2-0.6.0.gem) = 92672 +TIMESTAMP = 1647264850 +SHA256 (rubygem/google-cloud-logging-v2-0.7.0.gem) = d9f6dab9e5351400ee927a46751372ef1a7f684561e55b57da546b04be0eedb7 +SIZE (rubygem/google-cloud-logging-v2-0.7.0.gem) = 92672 diff --git a/net/rubygem-opennebula/Makefile b/net/rubygem-opennebula/Makefile index f459b98f47c..e86d09e32b6 100644 --- a/net/rubygem-opennebula/Makefile +++ b/net/rubygem-opennebula/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= opennebula -PORTVERSION= 6.2.0 +PORTVERSION= 6.2.1 CATEGORIES= net rubygems MASTER_SITES= RG diff --git a/net/rubygem-opennebula/distinfo b/net/rubygem-opennebula/distinfo index 19792712b75..01d2850ab71 100644 --- a/net/rubygem-opennebula/distinfo +++ b/net/rubygem-opennebula/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971202 -SHA256 (rubygem/opennebula-6.2.0.gem) = ce37e89bd3808e6fc1c329bd9e801ef999996e431e55f9c13b72757f07eb7db3 -SIZE (rubygem/opennebula-6.2.0.gem) = 211456 +TIMESTAMP = 1647264852 +SHA256 (rubygem/opennebula-6.2.1.gem) = 9f55e254cb5730d44a1bb67576f04613869a016ff900913286657e2cb65eaf83 +SIZE (rubygem/opennebula-6.2.1.gem) = 217088 diff --git a/net/rubygem-ovirt-engine-sdk/Makefile b/net/rubygem-ovirt-engine-sdk/Makefile index bf7fd5eb2b4..9b577e3d16d 100644 --- 
a/net/rubygem-ovirt-engine-sdk/Makefile +++ b/net/rubygem-ovirt-engine-sdk/Makefile @@ -2,6 +2,7 @@ PORTNAME= ovirt-engine-sdk PORTVERSION= 4.4.1 +PORTREVISION= 1 CATEGORIES= net rubygems MASTER_SITES= RG diff --git a/net/rubygem-train-core/Makefile b/net/rubygem-train-core/Makefile index 2ef3fb5666a..86c72d83bb4 100644 --- a/net/rubygem-train-core/Makefile +++ b/net/rubygem-train-core/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= train-core -PORTVERSION= 3.8.6 +PORTVERSION= 3.8.7 CATEGORIES= net rubygems MASTER_SITES= RG diff --git a/net/rubygem-train-core/distinfo b/net/rubygem-train-core/distinfo index a838a37327d..2a02603c79f 100644 --- a/net/rubygem-train-core/distinfo +++ b/net/rubygem-train-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971206 -SHA256 (rubygem/train-core-3.8.6.gem) = 43a617c3d82fffd7edca08534e51dc86c9484c47677a19362191c93faa30e6b1 -SIZE (rubygem/train-core-3.8.6.gem) = 46592 +TIMESTAMP = 1647264856 +SHA256 (rubygem/train-core-3.8.7.gem) = fe389ed52e01eff125ef7826f92c4e1e577b17ddd54f8826809a216bcd440780 +SIZE (rubygem/train-core-3.8.7.gem) = 46080 diff --git a/net/rubygem-train/Makefile b/net/rubygem-train/Makefile index 63f22281aa2..dc3d343b7d0 100644 --- a/net/rubygem-train/Makefile +++ b/net/rubygem-train/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= train -PORTVERSION= 3.8.6 +PORTVERSION= 3.8.7 CATEGORIES= net rubygems MASTER_SITES= RG diff --git a/net/rubygem-train/distinfo b/net/rubygem-train/distinfo index b091cb3d7a8..98f31b23e4b 100644 --- a/net/rubygem-train/distinfo +++ b/net/rubygem-train/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971204 -SHA256 (rubygem/train-3.8.6.gem) = c920e22369e45fe843f8763f1c1ab86014e9437ed28c36ea4f7845d9126d44d0 -SIZE (rubygem/train-3.8.6.gem) = 15360 +TIMESTAMP = 1647264854 +SHA256 (rubygem/train-3.8.7.gem) = 017c75c2f6c5fbc3360e50a2a79b5132a65e138df94c604960d5a637a8aa979c +SIZE (rubygem/train-3.8.7.gem) = 15360 diff --git a/net/traefik/Makefile b/net/traefik/Makefile 
index 5dc6677a2ee..a5a9b5eed59 100644 --- a/net/traefik/Makefile +++ b/net/traefik/Makefile @@ -1,5 +1,5 @@ PORTNAME= traefik -PORTVERSION= 2.6.1 +PORTVERSION= 2.6.2 CATEGORIES= net MASTER_SITES= LOCAL/riggs/${PORTNAME} diff --git a/net/traefik/distinfo b/net/traefik/distinfo index feeb6b0b43a..7949e4660a5 100644 --- a/net/traefik/distinfo +++ b/net/traefik/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645311214 -SHA256 (traefik-2.6.1.tar.xz) = a7837bd1d20d694f06b34ce63292aa1ebef5ab45e9ce473edfe207a9bc2c771c -SIZE (traefik-2.6.1.tar.xz) = 17225992 +TIMESTAMP = 1648380739 +SHA256 (traefik-2.6.2.tar.xz) = a7f97eb607018e87c5b6c744f28ddf8a2c0e79d36efe87d99be5926e3f048979 +SIZE (traefik-2.6.2.tar.xz) = 17249180 diff --git a/net/urlendec/Makefile b/net/urlendec/Makefile index 2a8182ff633..118c7cf06dd 100644 --- a/net/urlendec/Makefile +++ b/net/urlendec/Makefile @@ -6,7 +6,7 @@ PORTREVISION= 1 CATEGORIES= net MASTER_SITES= http://www.whizkidtech.redprince.net/fports/ -MAINTAINER= ports@FreeBSD.org +MAINTAINER= danfe@FreeBSD.org COMMENT= URL encoder and decoder written in assembly LICENSE= BSD2CLAUSE diff --git a/net/vinagre/Makefile b/net/vinagre/Makefile index b58296449ad..ca1101b4be0 100644 --- a/net/vinagre/Makefile +++ b/net/vinagre/Makefile @@ -2,7 +2,7 @@ PORTNAME= vinagre PORTVERSION= 3.22.0 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= net gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/net/wireshark/Makefile b/net/wireshark/Makefile index 79f477a26bb..ce6ce9c4cbf 100644 --- a/net/wireshark/Makefile +++ b/net/wireshark/Makefile @@ -2,6 +2,7 @@ PORTNAME?= wireshark DISTVERSION= 3.6.2 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= https://1.na.dl.wireshark.org/src/ \ https://1.eu.dl.wireshark.org/src/ \ diff --git a/net/yaz/Makefile b/net/yaz/Makefile index 7047259e501..119488cbd19 100644 --- a/net/yaz/Makefile +++ b/net/yaz/Makefile @@ -2,6 +2,7 @@ PORTNAME= yaz DISTVERSION= 5.31.1 +PORTREVISION= 1 CATEGORIES= net MASTER_SITES= 
https://ftp.indexdata.dk/pub/yaz/ diff --git a/news/nzbget/Makefile b/news/nzbget/Makefile index 1e4db5bedd9..1dc834ad479 100644 --- a/news/nzbget/Makefile +++ b/news/nzbget/Makefile @@ -2,6 +2,7 @@ PORTNAME= nzbget PORTVERSION= 21.1 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= news diff --git a/print/Makefile b/print/Makefile index 81976afd3a7..511972f430e 100644 --- a/print/Makefile +++ b/print/Makefile @@ -188,6 +188,7 @@ SUBDIR += py-preppy SUBDIR += py-psautohint SUBDIR += py-pycups + SUBDIR += py-pypdf3 SUBDIR += py-python-ly SUBDIR += py-relatorio SUBDIR += py-reportlab diff --git a/print/R-cran-knitr/Makefile b/print/R-cran-knitr/Makefile index a329e23f807..c2c7c6f1d4f 100644 --- a/print/R-cran-knitr/Makefile +++ b/print/R-cran-knitr/Makefile @@ -1,7 +1,7 @@ # Created by: TAKATSU Tomonari PORTNAME= knitr -PORTVERSION= 1.37 +PORTVERSION= 1.38 CATEGORIES= print DISTNAME= ${PORTNAME}_${PORTVERSION} @@ -10,11 +10,11 @@ COMMENT= General-Purpose Package for Dynamic Report Generation in R LICENSE= GPLv2+ -CRAN_DEPENDS= R-cran-evaluate>=0.10:devel/R-cran-evaluate \ +CRAN_DEPENDS= R-cran-evaluate>=0.15:devel/R-cran-evaluate \ R-cran-highr>0:textproc/R-cran-highr \ R-cran-stringr>=0.6:textproc/R-cran-stringr \ R-cran-yaml>=2.1.19:textproc/R-cran-yaml \ - R-cran-xfun>=0.27:misc/R-cran-xfun + R-cran-xfun>=0.29:misc/R-cran-xfun BUILD_DEPENDS= ${CRAN_DEPENDS} RUN_DEPENDS= ${CRAN_DEPENDS} diff --git a/print/R-cran-knitr/distinfo b/print/R-cran-knitr/distinfo index 6eb6a674c2e..3a37a7641aa 100644 --- a/print/R-cran-knitr/distinfo +++ b/print/R-cran-knitr/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1639969532 -SHA256 (knitr_1.37.tar.gz) = 39cd2a4848baebbe7fa0c0ab8200179690fb5b9190f0c1688d987c38363ad763 -SIZE (knitr_1.37.tar.gz) = 895567 +TIMESTAMP = 1648360272 +SHA256 (knitr_1.38.tar.gz) = d138e881414eed915cadcb8c82ffbbab002614f1d492cbf413cded255ab5e7ad +SIZE (knitr_1.38.tar.gz) = 898373 diff --git a/print/adobe-cmaps/Makefile b/print/adobe-cmaps/Makefile index 
9a7d90bfeb4..6c6a4327c2b 100644 --- a/print/adobe-cmaps/Makefile +++ b/print/adobe-cmaps/Makefile @@ -15,6 +15,7 @@ COMMENT= Adobe CMap collection USES= fonts NO_WRKSUBDIR= yes +NO_ARCH= yes NO_BUILD= yes INSTDIRS= ac15 ag15 ai0 aj16 aj20 ak12 rksj-cmaps FONTSDIR= ${PREFIX}/share/fonts/${PORTNAME} diff --git a/print/fontforge/Makefile b/print/fontforge/Makefile index 1d09441fde0..c753a187f68 100644 --- a/print/fontforge/Makefile +++ b/print/fontforge/Makefile @@ -2,7 +2,7 @@ PORTNAME= fontforge DISTVERSION= 20201107 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= print MASTER_SITES= https://github.com/${PORTNAME}/${PORTNAME}/releases/download/${DISTVERSION}/ diff --git a/print/foomatic-db-engine/Makefile b/print/foomatic-db-engine/Makefile index 37f4d058e6d..1ca72842bab 100644 --- a/print/foomatic-db-engine/Makefile +++ b/print/foomatic-db-engine/Makefile @@ -2,6 +2,7 @@ PORTNAME= foomatic-db-engine PORTVERSION= 4.0.13 +PORTREVISION= 1 PORTEPOCH= 2 CATEGORIES= print MASTER_SITES= http://www.openprinting.org/download/foomatic/ diff --git a/print/miktex/Makefile b/print/miktex/Makefile index 83833863f1a..cc28d972a31 100644 --- a/print/miktex/Makefile +++ b/print/miktex/Makefile @@ -1,5 +1,6 @@ PORTNAME= miktex DISTVERSION= 21.8 +PORTREVISION= 1 CATEGORIES= print MASTER_SITES= LOCAL/arrowd/miktex:bootstrap DISTFILES= miktex-zzdb1-2.9.tar.lzma:bootstrap \ diff --git a/print/pdfchain/Makefile b/print/pdfchain/Makefile index 74685c3b375..07e5fdcb55f 100644 --- a/print/pdfchain/Makefile +++ b/print/pdfchain/Makefile @@ -1,5 +1,6 @@ PORTNAME= pdfchain PORTVERSION= 0.4.4.2 +PORTREVISION= 1 CATEGORIES= print MASTER_SITES= SF/pdfchain/pdfchain-${PORTVERSION} diff --git a/print/py-fontparts/Makefile b/print/py-fontparts/Makefile index bf27f2b3a50..ae00b4eb1e1 100644 --- a/print/py-fontparts/Makefile +++ b/print/py-fontparts/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= fontparts -PORTVERSION= 0.10.2 +PORTVERSION= 0.10.3 CATEGORIES= print python MASTER_SITES= 
CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/print/py-fontparts/distinfo b/print/py-fontparts/distinfo index c53eea553e0..6ce0d84699c 100644 --- a/print/py-fontparts/distinfo +++ b/print/py-fontparts/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971118 -SHA256 (fontParts-0.10.2.zip) = a3a3926e977f82ae19e6823760b59f2338085973da1eaad5badaf969f261a737 -SIZE (fontParts-0.10.2.zip) = 496231 +TIMESTAMP = 1647264646 +SHA256 (fontParts-0.10.3.zip) = 687b632c775cdbfb37a6917c7f3f2a140ab1c0428c64924014d941699ec501be +SIZE (fontParts-0.10.3.zip) = 496432 diff --git a/print/py-pdf/Makefile b/print/py-pdf/Makefile index ca52688efa6..787e5e0c4f7 100644 --- a/print/py-pdf/Makefile +++ b/print/py-pdf/Makefile @@ -11,7 +11,7 @@ DISTNAME= pyPdf-${PORTVERSION} MAINTAINER= ports@FreeBSD.org COMMENT= Pure-Python PDF toolkit -USES= python:3.6+ +USES= dos2unix python:3.6+ USE_PYTHON= distutils autoplist .include diff --git a/print/py-pdf/files/patch-2to3 b/print/py-pdf/files/patch-2to3 new file mode 100644 index 00000000000..8ae24ce5b25 --- /dev/null +++ b/print/py-pdf/files/patch-2to3 @@ -0,0 +1,638 @@ +--- pyPdf/generic.py.orig 2010-12-04 22:49:56 UTC ++++ pyPdf/generic.py +@@ -35,9 +35,9 @@ __author__ = "Mathieu Fenniak" + __author_email__ = "biziqe@mathieu.fenniak.net" + + import re +-from utils import readNonWhitespace, RC4_encrypt +-import filters +-import utils ++from .utils import readNonWhitespace, RC4_encrypt ++from . import filters ++from . 
import utils + import decimal + import codecs + +@@ -99,7 +99,7 @@ class NullObject(PdfObject): + def readFromStream(stream): + nulltxt = stream.read(4) + if nulltxt != "null": +- raise utils.PdfReadError, "error reading null object" ++ raise utils.PdfReadError("error reading null object") + return NullObject() + readFromStream = staticmethod(readFromStream) + +@@ -137,7 +137,7 @@ class ArrayObject(list, PdfObject): + arr = ArrayObject() + tmp = stream.read(1) + if tmp != "[": +- raise utils.PdfReadError, "error reading array" ++ raise utils.PdfReadError("error reading array") + while True: + # skip leading whitespace + tok = stream.read(1) +@@ -241,7 +241,7 @@ class NumberObject(int, PdfObject): + # Given a string (either a "str" or "unicode"), create a ByteStringObject or a + # TextStringObject to represent the string. + def createStringObject(string): +- if isinstance(string, unicode): ++ if isinstance(string, str): + return TextStringObject(string) + elif isinstance(string, str): + if string.startswith(codecs.BOM_UTF16_BE): +@@ -367,7 +367,7 @@ class ByteStringObject(str, PdfObject): + # If read from a PDF document, this string appeared to match the + # PDFDocEncoding, or contained a UTF-16BE BOM mark to cause UTF-16 decoding to + # occur. +-class TextStringObject(unicode, PdfObject): ++class TextStringObject(str, PdfObject): + autodetect_pdfdocencoding = False + autodetect_utf16 = False + +@@ -425,7 +425,7 @@ class NameObject(str, PdfObject): + def readFromStream(stream): + name = stream.read(1) + if name != "/": +- raise utils.PdfReadError, "name read error" ++ raise utils.PdfReadError("name read error") + while True: + tok = stream.read(1) + if tok.isspace() or tok in NameObject.delimiterCharacters: +@@ -456,7 +456,7 @@ class DictionaryObject(dict, PdfObject): + def update(self, arr): + # note, a ValueError halfway through copying values + # will leave half the values in this dict. 
+- for k, v in arr.iteritems(): ++ for k, v in arr.items(): + self.__setitem__(k, v) + + def raw_get(self, key): +@@ -492,7 +492,7 @@ class DictionaryObject(dict, PdfObject): + if metadata == None: + return None + metadata = metadata.getObject() +- import xmp ++ from . import xmp + if not isinstance(metadata, xmp.XmpInformation): + metadata = xmp.XmpInformation(metadata) + self[NameObject("/Metadata")] = metadata +@@ -507,7 +507,7 @@ class DictionaryObject(dict, PdfObject): + + def writeToStream(self, stream, encryption_key): + stream.write("<<\n") +- for key, value in self.items(): ++ for key, value in list(self.items()): + key.writeToStream(stream, encryption_key) + stream.write(" ") + value.writeToStream(stream, encryption_key) +@@ -517,7 +517,7 @@ class DictionaryObject(dict, PdfObject): + def readFromStream(stream, pdf): + tmp = stream.read(2) + if tmp != "<<": +- raise utils.PdfReadError, "dictionary read error" ++ raise utils.PdfReadError("dictionary read error") + data = {} + while True: + tok = readNonWhitespace(stream) +@@ -529,9 +529,9 @@ class DictionaryObject(dict, PdfObject): + tok = readNonWhitespace(stream) + stream.seek(-1, 1) + value = readObject(stream, pdf) +- if data.has_key(key): ++ if key in data: + # multiple definitions of key not permitted +- raise utils.PdfReadError, "multiple definitions in dictionary" ++ raise utils.PdfReadError("multiple definitions in dictionary") + data[key] = value + pos = stream.tell() + s = readNonWhitespace(stream) +@@ -546,7 +546,7 @@ class DictionaryObject(dict, PdfObject): + # read \n after + stream.read(1) + # this is a stream object, not a dictionary +- assert data.has_key("/Length") ++ assert "/Length" in data + length = data["/Length"] + if isinstance(length, IndirectObject): + t = stream.tell() +@@ -570,10 +570,10 @@ class DictionaryObject(dict, PdfObject): + data["__streamdata__"] = data["__streamdata__"][:-1] + else: + stream.seek(pos, 0) +- raise utils.PdfReadError, "Unable to find 'endstream' marker 
after stream." ++ raise utils.PdfReadError("Unable to find 'endstream' marker after stream.") + else: + stream.seek(pos, 0) +- if data.has_key("__streamdata__"): ++ if "__streamdata__" in data: + return StreamObject.initializeFromDictionary(data) + else: + retval = DictionaryObject() +@@ -599,7 +599,7 @@ class StreamObject(DictionaryObject): + stream.write("\nendstream") + + def initializeFromDictionary(data): +- if data.has_key("/Filter"): ++ if "/Filter" in data: + retval = EncodedStreamObject() + else: + retval = DecodedStreamObject() +@@ -611,7 +611,7 @@ class StreamObject(DictionaryObject): + initializeFromDictionary = staticmethod(initializeFromDictionary) + + def flateEncode(self): +- if self.has_key("/Filter"): ++ if "/Filter" in self: + f = self["/Filter"] + if isinstance(f, ArrayObject): + f.insert(0, NameObject("/FlateDecode")) +@@ -648,14 +648,14 @@ class EncodedStreamObject(StreamObject): + # create decoded object + decoded = DecodedStreamObject() + decoded._data = filters.decodeStreamData(self) +- for key, value in self.items(): ++ for key, value in list(self.items()): + if not key in ("/Length", "/Filter", "/DecodeParms"): + decoded[key] = value + self.decodedSelf = decoded + return decoded._data + + def setData(self, data): +- raise utils.PdfReadError, "Creating EncodedStreamObject is not currently supported" ++ raise utils.PdfReadError("Creating EncodedStreamObject is not currently supported") + + + class RectangleObject(ArrayObject): +@@ -744,56 +744,56 @@ def encode_pdfdocencoding(unicode_string): + return retval + + def decode_pdfdocencoding(byte_array): +- retval = u'' ++ retval = '' + for b in byte_array: + c = _pdfDocEncoding[ord(b)] +- if c == u'\u0000': ++ if c == '\u0000': + raise UnicodeDecodeError("pdfdocencoding", b, -1, -1, + "does not exist in translation table") + retval += c + return retval + + _pdfDocEncoding = ( +- u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', +- u'\u0000', u'\u0000', 
u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', +- u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', u'\u0000', +- u'\u02d8', u'\u02c7', u'\u02c6', u'\u02d9', u'\u02dd', u'\u02db', u'\u02da', u'\u02dc', +- u'\u0020', u'\u0021', u'\u0022', u'\u0023', u'\u0024', u'\u0025', u'\u0026', u'\u0027', +- u'\u0028', u'\u0029', u'\u002a', u'\u002b', u'\u002c', u'\u002d', u'\u002e', u'\u002f', +- u'\u0030', u'\u0031', u'\u0032', u'\u0033', u'\u0034', u'\u0035', u'\u0036', u'\u0037', +- u'\u0038', u'\u0039', u'\u003a', u'\u003b', u'\u003c', u'\u003d', u'\u003e', u'\u003f', +- u'\u0040', u'\u0041', u'\u0042', u'\u0043', u'\u0044', u'\u0045', u'\u0046', u'\u0047', +- u'\u0048', u'\u0049', u'\u004a', u'\u004b', u'\u004c', u'\u004d', u'\u004e', u'\u004f', +- u'\u0050', u'\u0051', u'\u0052', u'\u0053', u'\u0054', u'\u0055', u'\u0056', u'\u0057', +- u'\u0058', u'\u0059', u'\u005a', u'\u005b', u'\u005c', u'\u005d', u'\u005e', u'\u005f', +- u'\u0060', u'\u0061', u'\u0062', u'\u0063', u'\u0064', u'\u0065', u'\u0066', u'\u0067', +- u'\u0068', u'\u0069', u'\u006a', u'\u006b', u'\u006c', u'\u006d', u'\u006e', u'\u006f', +- u'\u0070', u'\u0071', u'\u0072', u'\u0073', u'\u0074', u'\u0075', u'\u0076', u'\u0077', +- u'\u0078', u'\u0079', u'\u007a', u'\u007b', u'\u007c', u'\u007d', u'\u007e', u'\u0000', +- u'\u2022', u'\u2020', u'\u2021', u'\u2026', u'\u2014', u'\u2013', u'\u0192', u'\u2044', +- u'\u2039', u'\u203a', u'\u2212', u'\u2030', u'\u201e', u'\u201c', u'\u201d', u'\u2018', +- u'\u2019', u'\u201a', u'\u2122', u'\ufb01', u'\ufb02', u'\u0141', u'\u0152', u'\u0160', +- u'\u0178', u'\u017d', u'\u0131', u'\u0142', u'\u0153', u'\u0161', u'\u017e', u'\u0000', +- u'\u20ac', u'\u00a1', u'\u00a2', u'\u00a3', u'\u00a4', u'\u00a5', u'\u00a6', u'\u00a7', +- u'\u00a8', u'\u00a9', u'\u00aa', u'\u00ab', u'\u00ac', u'\u0000', u'\u00ae', u'\u00af', +- u'\u00b0', u'\u00b1', u'\u00b2', u'\u00b3', u'\u00b4', u'\u00b5', u'\u00b6', u'\u00b7', +- u'\u00b8', 
u'\u00b9', u'\u00ba', u'\u00bb', u'\u00bc', u'\u00bd', u'\u00be', u'\u00bf', +- u'\u00c0', u'\u00c1', u'\u00c2', u'\u00c3', u'\u00c4', u'\u00c5', u'\u00c6', u'\u00c7', +- u'\u00c8', u'\u00c9', u'\u00ca', u'\u00cb', u'\u00cc', u'\u00cd', u'\u00ce', u'\u00cf', +- u'\u00d0', u'\u00d1', u'\u00d2', u'\u00d3', u'\u00d4', u'\u00d5', u'\u00d6', u'\u00d7', +- u'\u00d8', u'\u00d9', u'\u00da', u'\u00db', u'\u00dc', u'\u00dd', u'\u00de', u'\u00df', +- u'\u00e0', u'\u00e1', u'\u00e2', u'\u00e3', u'\u00e4', u'\u00e5', u'\u00e6', u'\u00e7', +- u'\u00e8', u'\u00e9', u'\u00ea', u'\u00eb', u'\u00ec', u'\u00ed', u'\u00ee', u'\u00ef', +- u'\u00f0', u'\u00f1', u'\u00f2', u'\u00f3', u'\u00f4', u'\u00f5', u'\u00f6', u'\u00f7', +- u'\u00f8', u'\u00f9', u'\u00fa', u'\u00fb', u'\u00fc', u'\u00fd', u'\u00fe', u'\u00ff' ++ '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', ++ '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', ++ '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', '\u0000', ++ '\u02d8', '\u02c7', '\u02c6', '\u02d9', '\u02dd', '\u02db', '\u02da', '\u02dc', ++ '\u0020', '\u0021', '\u0022', '\u0023', '\u0024', '\u0025', '\u0026', '\u0027', ++ '\u0028', '\u0029', '\u002a', '\u002b', '\u002c', '\u002d', '\u002e', '\u002f', ++ '\u0030', '\u0031', '\u0032', '\u0033', '\u0034', '\u0035', '\u0036', '\u0037', ++ '\u0038', '\u0039', '\u003a', '\u003b', '\u003c', '\u003d', '\u003e', '\u003f', ++ '\u0040', '\u0041', '\u0042', '\u0043', '\u0044', '\u0045', '\u0046', '\u0047', ++ '\u0048', '\u0049', '\u004a', '\u004b', '\u004c', '\u004d', '\u004e', '\u004f', ++ '\u0050', '\u0051', '\u0052', '\u0053', '\u0054', '\u0055', '\u0056', '\u0057', ++ '\u0058', '\u0059', '\u005a', '\u005b', '\u005c', '\u005d', '\u005e', '\u005f', ++ '\u0060', '\u0061', '\u0062', '\u0063', '\u0064', '\u0065', '\u0066', '\u0067', ++ '\u0068', '\u0069', '\u006a', '\u006b', '\u006c', '\u006d', '\u006e', '\u006f', ++ '\u0070', '\u0071', '\u0072', 
'\u0073', '\u0074', '\u0075', '\u0076', '\u0077', ++ '\u0078', '\u0079', '\u007a', '\u007b', '\u007c', '\u007d', '\u007e', '\u0000', ++ '\u2022', '\u2020', '\u2021', '\u2026', '\u2014', '\u2013', '\u0192', '\u2044', ++ '\u2039', '\u203a', '\u2212', '\u2030', '\u201e', '\u201c', '\u201d', '\u2018', ++ '\u2019', '\u201a', '\u2122', '\ufb01', '\ufb02', '\u0141', '\u0152', '\u0160', ++ '\u0178', '\u017d', '\u0131', '\u0142', '\u0153', '\u0161', '\u017e', '\u0000', ++ '\u20ac', '\u00a1', '\u00a2', '\u00a3', '\u00a4', '\u00a5', '\u00a6', '\u00a7', ++ '\u00a8', '\u00a9', '\u00aa', '\u00ab', '\u00ac', '\u0000', '\u00ae', '\u00af', ++ '\u00b0', '\u00b1', '\u00b2', '\u00b3', '\u00b4', '\u00b5', '\u00b6', '\u00b7', ++ '\u00b8', '\u00b9', '\u00ba', '\u00bb', '\u00bc', '\u00bd', '\u00be', '\u00bf', ++ '\u00c0', '\u00c1', '\u00c2', '\u00c3', '\u00c4', '\u00c5', '\u00c6', '\u00c7', ++ '\u00c8', '\u00c9', '\u00ca', '\u00cb', '\u00cc', '\u00cd', '\u00ce', '\u00cf', ++ '\u00d0', '\u00d1', '\u00d2', '\u00d3', '\u00d4', '\u00d5', '\u00d6', '\u00d7', ++ '\u00d8', '\u00d9', '\u00da', '\u00db', '\u00dc', '\u00dd', '\u00de', '\u00df', ++ '\u00e0', '\u00e1', '\u00e2', '\u00e3', '\u00e4', '\u00e5', '\u00e6', '\u00e7', ++ '\u00e8', '\u00e9', '\u00ea', '\u00eb', '\u00ec', '\u00ed', '\u00ee', '\u00ef', ++ '\u00f0', '\u00f1', '\u00f2', '\u00f3', '\u00f4', '\u00f5', '\u00f6', '\u00f7', ++ '\u00f8', '\u00f9', '\u00fa', '\u00fb', '\u00fc', '\u00fd', '\u00fe', '\u00ff' + ) + + assert len(_pdfDocEncoding) == 256 + + _pdfDocEncoding_rev = {} +-for i in xrange(256): ++for i in range(256): + char = _pdfDocEncoding[i] +- if char == u"\u0000": ++ if char == "\u0000": + continue + assert char not in _pdfDocEncoding_rev + _pdfDocEncoding_rev[char] = i +--- pyPdf/pdf.py.orig 2010-12-04 22:49:56 UTC ++++ pyPdf/pdf.py +@@ -44,15 +44,15 @@ import math + import struct + from sys import version_info + try: +- from cStringIO import StringIO ++ from io import StringIO + except ImportError: +- from StringIO import 
StringIO ++ from io import StringIO + +-import filters +-import utils ++from . import filters ++from . import utils + import warnings +-from generic import * +-from utils import readNonWhitespace, readUntilWhitespace, ConvertFunctionsToVirtualList ++from .generic import * ++from .utils import readNonWhitespace, readUntilWhitespace, ConvertFunctionsToVirtualList + + if version_info < ( 2, 4 ): + from sets import ImmutableSet as frozenset +@@ -82,7 +82,7 @@ class PdfFileWriter(object): + # info object + info = DictionaryObject() + info.update({ +- NameObject("/Producer"): createStringObject(u"Python PDF Library - http://pybrary.net/pyPdf/") ++ NameObject("/Producer"): createStringObject("Python PDF Library - http://pybrary.net/pyPdf/") + }) + self._info = self._addObject(info) + +@@ -250,13 +250,13 @@ class PdfFileWriter(object): + # we sweep for indirect references. This forces self-page-referencing + # trees to reference the correct new object location, rather than + # copying in a new copy of the page object. 
+- for objIndex in xrange(len(self._objects)): ++ for objIndex in range(len(self._objects)): + obj = self._objects[objIndex] + if isinstance(obj, PageObject) and obj.indirectRef != None: + data = obj.indirectRef +- if not externalReferenceMap.has_key(data.pdf): ++ if data.pdf not in externalReferenceMap: + externalReferenceMap[data.pdf] = {} +- if not externalReferenceMap[data.pdf].has_key(data.generation): ++ if data.generation not in externalReferenceMap[data.pdf]: + externalReferenceMap[data.pdf][data.generation] = {} + externalReferenceMap[data.pdf][data.generation][data.idnum] = IndirectObject(objIndex + 1, 0, self) + +@@ -310,7 +310,7 @@ class PdfFileWriter(object): + + def _sweepIndirectReferences(self, externMap, data): + if isinstance(data, DictionaryObject): +- for key, value in data.items(): ++ for key, value in list(data.items()): + origvalue = value + value = self._sweepIndirectReferences(externMap, value) + if isinstance(value, StreamObject): +@@ -346,9 +346,9 @@ class PdfFileWriter(object): + self._objects.append(None) # placeholder + idnum = len(self._objects) + newobj_ido = IndirectObject(idnum, 0, self) +- if not externMap.has_key(data.pdf): ++ if data.pdf not in externMap: + externMap[data.pdf] = {} +- if not externMap[data.pdf].has_key(data.generation): ++ if data.generation not in externMap[data.pdf]: + externMap[data.pdf][data.generation] = {} + externMap[data.pdf][data.generation][data.idnum] = newobj_ido + newobj = self._sweepIndirectReferences(externMap, newobj) +@@ -385,7 +385,7 @@ class PdfFileReader(object): + # @return Returns a {@link #DocumentInformation DocumentInformation} + # instance, or None if none exists. 
+ def getDocumentInfo(self): +- if not self.trailer.has_key("/Info"): ++ if "/Info" not in self.trailer: + return None + obj = self.trailer['/Info'] + retval = DocumentInformation() +@@ -471,27 +471,27 @@ class PdfFileReader(object): + catalog = self.trailer["/Root"] + + # get the name tree +- if catalog.has_key("/Dests"): ++ if "/Dests" in catalog: + tree = catalog["/Dests"] +- elif catalog.has_key("/Names"): ++ elif "/Names" in catalog: + names = catalog['/Names'] +- if names.has_key("/Dests"): ++ if "/Dests" in names: + tree = names['/Dests'] + + if tree == None: + return retval + +- if tree.has_key("/Kids"): ++ if "/Kids" in tree: + # recurse down the tree + for kid in tree["/Kids"]: + self.getNamedDestinations(kid.getObject(), retval) + +- if tree.has_key("/Names"): ++ if "/Names" in tree: + names = tree["/Names"] + for i in range(0, len(names), 2): + key = names[i].getObject() + val = names[i+1].getObject() +- if isinstance(val, DictionaryObject) and val.has_key('/D'): ++ if isinstance(val, DictionaryObject) and '/D' in val: + val = val['/D'] + dest = self._buildDestination(key, val) + if dest != None: +@@ -517,9 +517,9 @@ class PdfFileReader(object): + catalog = self.trailer["/Root"] + + # get the outline dictionary and named destinations +- if catalog.has_key("/Outlines"): ++ if "/Outlines" in catalog: + lines = catalog["/Outlines"] +- if lines.has_key("/First"): ++ if "/First" in lines: + node = lines["/First"] + self._namedDests = self.getNamedDestinations() + +@@ -533,13 +533,13 @@ class PdfFileReader(object): + outlines.append(outline) + + # check for sub-outlines +- if node.has_key("/First"): ++ if "/First" in node: + subOutlines = [] + self.getOutlines(node["/First"], subOutlines) + if subOutlines: + outlines.append(subOutlines) + +- if not node.has_key("/Next"): ++ if "/Next" not in node: + break + node = node["/Next"] + +@@ -553,13 +553,13 @@ class PdfFileReader(object): + def _buildOutline(self, node): + dest, title, outline = None, None, None + +- 
if node.has_key("/A") and node.has_key("/Title"): ++ if "/A" in node and "/Title" in node: + # Action, section 8.5 (only type GoTo supported) + title = node["/Title"] + action = node["/A"] + if action["/S"] == "/GoTo": + dest = action["/D"] +- elif node.has_key("/Dest") and node.has_key("/Title"): ++ elif "/Dest" in node and "/Title" in node: + # Destination, section 8.2.1 + title = node["/Title"] + dest = node["/Dest"] +@@ -568,7 +568,7 @@ class PdfFileReader(object): + if dest: + if isinstance(dest, ArrayObject): + outline = self._buildDestination(title, dest) +- elif isinstance(dest, unicode) and self._namedDests.has_key(dest): ++ elif isinstance(dest, str) and dest in self._namedDests: + outline = self._namedDests[dest] + outline[NameObject("/Title")] = title + else: +@@ -598,7 +598,7 @@ class PdfFileReader(object): + t = pages["/Type"] + if t == "/Pages": + for attr in inheritablePageAttributes: +- if pages.has_key(attr): ++ if attr in pages: + inherit[attr] = pages[attr] + for page in pages["/Kids"]: + addt = {} +@@ -606,10 +606,10 @@ class PdfFileReader(object): + addt["indirectRef"] = page + self._flatten(page.getObject(), inherit, **addt) + elif t == "/Page": +- for attr,value in inherit.items(): ++ for attr,value in list(inherit.items()): + # if the page has it's own value, it does not inherit the + # parent's value: +- if not pages.has_key(attr): ++ if attr not in pages: + pages[attr] = value + pageObj = PageObject(self, indirectRef) + pageObj.update(pages) +@@ -620,7 +620,7 @@ class PdfFileReader(object): + if retval != None: + return retval + if indirectReference.generation == 0 and \ +- self.xref_objStm.has_key(indirectReference.idnum): ++ indirectReference.idnum in self.xref_objStm: + # indirect reference to object in object stream + # read the entire object stream into memory + stmnum,idx = self.xref_objStm[indirectReference.idnum] +@@ -652,7 +652,7 @@ class PdfFileReader(object): + if not self._override_encryption and self.isEncrypted: + # if we 
don't have the encryption key: + if not hasattr(self, '_decryption_key'): +- raise Exception, "file has not been decrypted" ++ raise Exception("file has not been decrypted") + # otherwise, decrypt here... + import struct + pack1 = struct.pack("= len_self: +- raise IndexError, "sequence index out of range" ++ raise IndexError("sequence index out of range") + return self.getFunction(index) + + def RC4_encrypt(key, plaintext): +@@ -117,6 +117,6 @@ class PageSizeNotDefinedError(PyPdfError): + if __name__ == "__main__": + # test RC4 + out = RC4_encrypt("Key", "Plaintext") +- print repr(out) ++ print(repr(out)) + pt = RC4_encrypt("Key", out) +- print repr(pt) ++ print(repr(pt)) diff --git a/print/py-pypdf3/Makefile b/print/py-pypdf3/Makefile new file mode 100644 index 00000000000..de2d8b230aa --- /dev/null +++ b/print/py-pypdf3/Makefile @@ -0,0 +1,23 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= pypdf3 +PORTVERSION= 1.0.6 +CATEGORIES= print python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +DISTNAME= PyPDF3-${PORTVERSION} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Pure Python PDF toolkit + +LICENSE= BSD3CLAUSE +LICENSE_FILE= ${WRKSRC}/LICENSE + +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}tqdm>=0:misc/py-tqdm@${PY_FLAVOR} + +USES= python:3.7+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +.include diff --git a/print/py-pypdf3/distinfo b/print/py-pypdf3/distinfo new file mode 100644 index 00000000000..161899c17d2 --- /dev/null +++ b/print/py-pypdf3/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264410 +SHA256 (PyPDF3-1.0.6.tar.gz) = c946f3273419e37258e35e72273f49904ab15723d87a761c1115ef99799f8c5f +SIZE (PyPDF3-1.0.6.tar.gz) = 294825 diff --git a/print/py-pypdf3/pkg-descr b/print/py-pypdf3/pkg-descr new file mode 100644 index 00000000000..81a1b3fc5e0 --- /dev/null +++ b/print/py-pypdf3/pkg-descr @@ -0,0 +1,8 @@ +PyPDF3 is a fork of PyPDF2 with feature improvements. 
+ +PyPDF3 is a pure-python PDF library capable of splitting, merging together, +cropping, and transforming the pages of PDF files. It can also add custom data, +viewing options, and passwords to PDF files. It can retrieve text and metadata +from PDFs as well as merge entire files together. + +WWW: https://github.com/sfneal/PyPDF3 diff --git a/print/py-ttfautohint-py/Makefile b/print/py-ttfautohint-py/Makefile index b6f70680e57..396288218bd 100644 --- a/print/py-ttfautohint-py/Makefile +++ b/print/py-ttfautohint-py/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= ttfautohint-py -PORTVERSION= 0.5.0 +PORTVERSION= 0.5.1 CATEGORIES= print python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/print/py-ttfautohint-py/distinfo b/print/py-ttfautohint-py/distinfo index aae89606bc3..1bbd4ca71db 100644 --- a/print/py-ttfautohint-py/distinfo +++ b/print/py-ttfautohint-py/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058104 -SHA256 (ttfautohint-py-0.5.0.tar.gz) = 0212be8d5590bab2f0e47d827b5f84b5a7124089d94a1d8106ea5d4e155af639 -SIZE (ttfautohint-py-0.5.0.tar.gz) = 188072 +TIMESTAMP = 1647264650 +SHA256 (ttfautohint-py-0.5.1.tar.gz) = d5e324e1437399ab3ed2ffeb68f8b3235d3ad151eefc8375a1c3a380f5fa94fa +SIZE (ttfautohint-py-0.5.1.tar.gz) = 188227 diff --git a/print/py-uharfbuzz/Makefile b/print/py-uharfbuzz/Makefile index c85e762ef4a..7684c346265 100644 --- a/print/py-uharfbuzz/Makefile +++ b/print/py-uharfbuzz/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= uharfbuzz -PORTVERSION= 0.21.0 +PORTVERSION= 0.22.0 CATEGORIES= print python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/print/py-uharfbuzz/distinfo b/print/py-uharfbuzz/distinfo index ab1fffc948c..94a7e6eb83d 100644 --- a/print/py-uharfbuzz/distinfo +++ b/print/py-uharfbuzz/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058106 -SHA256 (uharfbuzz-0.21.0.zip) = 01b9c463f876f426fdc6dffc8e7eef60d46546502d5ebc121e81dd6e003089e3 -SIZE 
(uharfbuzz-0.21.0.zip) = 1184982 +TIMESTAMP = 1647264654 +SHA256 (uharfbuzz-0.22.0.zip) = 82b68867fffd78d84e664ee872a193a897a35f398e40095ede7759456da4ebb6 +SIZE (uharfbuzz-0.22.0.zip) = 1205322 diff --git a/print/py-uharfbuzz/files/patch-harfbuzz4 b/print/py-uharfbuzz/files/patch-harfbuzz4 deleted file mode 100644 index da16a1cbe6c..00000000000 --- a/print/py-uharfbuzz/files/patch-harfbuzz4 +++ /dev/null @@ -1,440 +0,0 @@ -Obtained from: https://github.com/harfbuzz/uharfbuzz/pull/110/commits/6a926ca7746a35a98ed8d719c870e340d1733e63 - https://github.com/harfbuzz/uharfbuzz/pull/110/commits/5d9150cc267b16401f98d0f0effd656dcc0d6c38 - ---- src/uharfbuzz/_harfbuzz.pyx.orig 2022-02-08 07:41:12 UTC -+++ src/uharfbuzz/_harfbuzz.pyx -@@ -1,5 +1,6 @@ - #cython: language_level=3 - import os -+import warnings - from enum import IntEnum - from .charfbuzz cimport * - from libc.stdlib cimport free, malloc -@@ -529,12 +530,12 @@ cdef class Font: - def close_path(c): - c.closePath() - -- funcs.set_move_to_func(move_to) -- funcs.set_line_to_func(line_to) -- funcs.set_cubic_to_func(cubic_to) -- funcs.set_quadratic_to_func(quadratic_to) -- funcs.set_close_path_func(close_path) -- funcs.draw_glyph(self, gid, pen) -+ funcs.set_move_to_func(move_to, pen) -+ funcs.set_line_to_func(line_to, pen) -+ funcs.set_cubic_to_func(cubic_to, pen) -+ funcs.set_quadratic_to_func(quadratic_to, pen) -+ funcs.set_close_path_func(close_path, pen) -+ funcs.get_glyph_shape(self, gid) - - - cdef hb_position_t _glyph_h_advance_func(hb_font_t* font, void* font_data, -@@ -888,39 +889,69 @@ def ot_layout_get_baseline(font: Font, - def ot_font_set_funcs(Font font): - hb_ot_font_set_funcs(font._hb_font) - --cdef void _move_to_func(hb_position_t to_x, -- hb_position_t to_y, -+cdef void _move_to_func(hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float to_x, -+ float to_y, - void *user_data): -- m = (user_data).move_to_func() -- m(to_x, to_y, (user_data).user_data()) -+ m = 
(draw_data).move_to_func() -+ userdata = user_data -+ if userdata is None: -+ userdata = (draw_data).user_data() -+ m(to_x, to_y, userdata) - --cdef void _line_to_func(hb_position_t to_x, -- hb_position_t to_y, -+cdef void _line_to_func(hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float to_x, -+ float to_y, - void *user_data): -- l = (user_data).line_to_func() -- l(to_x, to_y, (user_data).user_data()) -+ l = (draw_data).line_to_func() -+ userdata = user_data -+ if userdata is None: -+ userdata = (draw_data).user_data() -+ l(to_x, to_y, userdata) - --cdef void _close_path_func(void *user_data): -- cl = (user_data).close_path_func() -- cl((user_data).user_data()) -+cdef void _close_path_func(hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ void *user_data): -+ cl = (draw_data).close_path_func() -+ userdata = user_data -+ if userdata is None: -+ userdata = (draw_data).user_data() -+ cl(userdata) - --cdef void _quadratic_to_func(hb_position_t c1_x, -- hb_position_t c1_y, -- hb_position_t to_x, -- hb_position_t to_y, -- void *user_data): -- q = (user_data).quadratic_to_func() -- q(c1_x, c1_y, to_x, to_y, (user_data).user_data()) -+cdef void _quadratic_to_func(hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float c1_x, -+ float c1_y, -+ float to_x, -+ float to_y, -+ void *user_data): -+ q = (draw_data).quadratic_to_func() -+ userdata = user_data -+ if userdata is None: -+ userdata = (draw_data).user_data() -+ q(c1_x, c1_y, to_x, to_y, userdata) - --cdef void _cubic_to_func(hb_position_t c1_x, -- hb_position_t c1_y, -- hb_position_t c2_x, -- hb_position_t c2_y, -- hb_position_t to_x, -- hb_position_t to_y, -- void *user_data): -- c = (user_data).cubic_to_func() -- c(c1_x, c1_y, c2_x, c2_y, to_x, to_y, (user_data).user_data()) -+cdef void _cubic_to_func(hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float c1_x, -+ float c1_y, -+ float c2_x, -+ float c2_y, -+ float to_x, -+ 
float to_y, -+ void *user_data): -+ c = (draw_data).cubic_to_func() -+ userdata = user_data -+ if userdata is None: -+ userdata = (draw_data).user_data() -+ c(c1_x, c1_y, c2_x, c2_y, to_x, to_y, userdata) - - - cdef class DrawFuncs: -@@ -939,9 +970,16 @@ cdef class DrawFuncs: - def __dealloc__(self): - hb_draw_funcs_destroy(self._hb_drawfuncs) - -+ def get_glyph_shape(self, font: Font, gid: int): -+ hb_font_get_glyph_shape(font._hb_font, gid, self._hb_drawfuncs, self); -+ - def draw_glyph(self, font: Font, gid: int, user_data: object): -+ warnings.warn( -+ "draw_glyph() is deprecated, use get_glyph_shape() instead", -+ DeprecationWarning, -+ ) - self._user_data = user_data -- hb_font_draw_glyph(font._hb_font, gid, self._hb_drawfuncs, self); -+ self.get_glyph_shape(font, gid) - - def move_to_func(self): - return self._move_to_func -@@ -963,54 +1001,59 @@ cdef class DrawFuncs: - - def set_move_to_func(self, - func: Callable[[ -- int, -- int, -+ float, -+ float, - object, # user_data -- ], None]) -> None: -+ ], None], -+ user_data: object = None) -> None: - self._move_to_func = func - hb_draw_funcs_set_move_to_func( -- self._hb_drawfuncs, _move_to_func) -+ self._hb_drawfuncs, _move_to_func, user_data, NULL) - - def set_line_to_func(self, - func: Callable[[ -- int, -- int, -+ float, -+ float, - object, # user_data -- ], None]) -> None: -+ ], None], -+ user_data: object = None) -> None: - self._line_to_func = func - hb_draw_funcs_set_line_to_func( -- self._hb_drawfuncs, _line_to_func) -+ self._hb_drawfuncs, _line_to_func, user_data, NULL) - - def set_cubic_to_func(self, - func: Callable[[ -- int, -- int, -- int, -- int, -- int, -- int, -+ float, -+ float, -+ float, -+ float, -+ float, -+ float, - object, # user_data -- ], None]) -> None: -+ ], None], -+ user_data: object = None) -> None: - self._cubic_to_func = func - hb_draw_funcs_set_cubic_to_func( -- self._hb_drawfuncs, _cubic_to_func) -+ self._hb_drawfuncs, _cubic_to_func, user_data, NULL) - - def 
set_quadratic_to_func(self, - func: Callable[[ -- int, -- int, -- int, -- int, -+ float, -+ float, -+ float, -+ float, - object, # user_data -- ], None]) -> None: -+ ], None], -+ user_data: object = None) -> None: - self._quadratic_to_func = func - hb_draw_funcs_set_quadratic_to_func( -- self._hb_drawfuncs, _quadratic_to_func) -+ self._hb_drawfuncs, _quadratic_to_func, user_data, NULL) - - def set_close_path_func(self, - func: Callable[[ - object -- ], None]) -> None: -+ ], None], -+ user_data: object = None) -> None: - self._close_path_func = func - hb_draw_funcs_set_close_path_func( -- self._hb_drawfuncs, _close_path_func) -+ self._hb_drawfuncs, _close_path_func, user_data, NULL) ---- src/uharfbuzz/charfbuzz.pxd.orig 2022-02-08 07:41:12 UTC -+++ src/uharfbuzz/charfbuzz.pxd -@@ -59,6 +59,15 @@ cdef extern from "hb.h": - unsigned short u8[4] - short i8[4] - -+ ctypedef union hb_var_num_t: -+ float f -+ unsigned long u32 -+ long i32 -+ unsigned int u16[2] -+ int i16[2] -+ unsigned short u8[4] -+ short i8[4] -+ - # hb-blob.h - ctypedef struct hb_blob_t: - pass -@@ -300,63 +309,106 @@ cdef extern from "hb.h": - unsigned int size) - void hb_font_destroy(hb_font_t* font) - -+ ctypedef struct hb_draw_state_t: -+ hb_bool_t path_open -+ float path_start_x -+ float path_start_y -+ float current_x -+ float current_y -+ hb_var_num_t reserved1 -+ hb_var_num_t reserved2 -+ hb_var_num_t reserved3 -+ hb_var_num_t reserved4 -+ hb_var_num_t reserved5 -+ hb_var_num_t reserved6 -+ hb_var_num_t reserved7 -+ - ctypedef struct hb_draw_funcs_t: - pass - - ctypedef void (*hb_draw_move_to_func_t) ( -- hb_position_t to_x, -- hb_position_t to_y, -+ hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float to_x, -+ float to_y, - void *user_data); -+ - ctypedef void (*hb_draw_line_to_func_t) ( -- hb_position_t to_x, -- hb_position_t to_y, -+ hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float to_x, -+ float to_y, - void *user_data); -+ - ctypedef 
void (*hb_draw_quadratic_to_func_t) ( -- hb_position_t control_x, -- hb_position_t control_y, -- hb_position_t to_x, -- hb_position_t to_y, -+ hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float control_x, -+ float control_y, -+ float to_x, -+ float to_y, - void *user_data); -+ - ctypedef void (*hb_draw_cubic_to_func_t) ( -- hb_position_t control1_x, -- hb_position_t control1_y, -- hb_position_t control2_x, -- hb_position_t control2_y, -- hb_position_t to_x, -- hb_position_t to_y, -+ hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, -+ float control1_x, -+ float control1_y, -+ float control2_x, -+ float control2_y, -+ float to_x, -+ float to_y, - void *user_data); -+ - ctypedef void (*hb_draw_close_path_func_t) ( -+ hb_draw_funcs_t *dfuncs, -+ void *draw_data, -+ hb_draw_state_t *st, - void *user_data); - - void hb_draw_funcs_set_move_to_func ( -- hb_draw_funcs_t* funcs, -- hb_draw_move_to_func_t move_to) -+ hb_draw_funcs_t* dfuncs, -+ hb_draw_move_to_func_t func, -+ void *user_data, -+ hb_destroy_func_t destroy) - - void hb_draw_funcs_set_line_to_func ( -- hb_draw_funcs_t* funcs, -- hb_draw_line_to_func_t line_to) -+ hb_draw_funcs_t* dfuncs, -+ hb_draw_line_to_func_t func, -+ void *user_data, -+ hb_destroy_func_t destroy) - - void hb_draw_funcs_set_quadratic_to_func ( -- hb_draw_funcs_t* funcs, -- hb_draw_quadratic_to_func_t quadratic_to) -+ hb_draw_funcs_t* dfuncs, -+ hb_draw_quadratic_to_func_t func, -+ void *user_data, -+ hb_destroy_func_t destroy) - - void hb_draw_funcs_set_cubic_to_func ( -- hb_draw_funcs_t* funcs, -- hb_draw_cubic_to_func_t cubic_to) -+ hb_draw_funcs_t* dfuncs, -+ hb_draw_cubic_to_func_t func, -+ void *user_data, -+ hb_destroy_func_t destroy) - - void hb_draw_funcs_set_close_path_func( -- hb_draw_funcs_t* funcs, -- hb_draw_close_path_func_t close_path) -+ hb_draw_funcs_t* dfuncs, -+ hb_draw_close_path_func_t func, -+ void *user_data, -+ hb_destroy_func_t destroy) - - hb_draw_funcs_t* 
hb_draw_funcs_create() - - void hb_draw_funcs_destroy(hb_draw_funcs_t* funcs) - -- hb_bool_t hb_font_draw_glyph( -+ void hb_font_get_glyph_shape( - hb_font_t *font, - hb_codepoint_t glyph, -- const hb_draw_funcs_t *funcs, -- void *user_data) -+ const hb_draw_funcs_t *dfuncs, -+ void *draw_data) - - # hb-shape.h - void hb_shape( ---- tests/test_uharfbuzz.py.orig 2022-02-08 07:41:12 UTC -+++ tests/test_uharfbuzz.py -@@ -495,27 +495,49 @@ class TestCallbacks: - buf.set_message_func(message_collector.message) - hb.shape(blankfont, buf) - -- - def test_draw_funcs(self, opensans): - funcs = hb.DrawFuncs() - container = [] - def move_to(x,y,c): -- c.append(f"M{x},{y}") -+ c.append(f"M{x:g},{y:g}") - def line_to(x,y,c): -- c.append(f"L{x},{y}") -+ c.append(f"L{x:g},{y:g}") - def cubic_to(c1x,c1y,c2x,c2y,x,y,c): -- c.append(f"C{c1x},{c1y} {c2x},{c2y} {x},{y}") -+ c.append(f"C{c1x:g},{c1y:g} {c2x:g},{c2y:g} {x:g},{y:g}") - def quadratic_to(c1x,c1y,x,y,c): -- c.append(f"Q{c1x},{c1y} {x},{y}") -+ c.append(f"Q{c1x:g},{c1y:g} {x:g},{y:g}") - def close_path(c): - c.append("Z") - -+ funcs.set_move_to_func(move_to, container) -+ funcs.set_line_to_func(line_to, container) -+ funcs.set_cubic_to_func(cubic_to, container) -+ funcs.set_quadratic_to_func(quadratic_to, container) -+ funcs.set_close_path_func(close_path, container) -+ funcs.get_glyph_shape(opensans, 1) -+ assert "".join(container) == "M1120,0L938,465L352,465L172,0L0,0L578,1468L721,1468L1296,0L1120,0ZM885,618L715,1071Q682,1157 647,1282Q625,1186 584,1071L412,618L885,618Z" -+ -+ def test_draw_funcs_deprecated(self, opensans): -+ funcs = hb.DrawFuncs() -+ container = [] -+ def move_to(x,y,c): -+ c.append(f"M{x:g},{y:g}") -+ def line_to(x,y,c): -+ c.append(f"L{x:g},{y:g}") -+ def cubic_to(c1x,c1y,c2x,c2y,x,y,c): -+ c.append(f"C{c1x:g},{c1y:g} {c2x:g},{c2y:g} {x:g},{y:g}") -+ def quadratic_to(c1x,c1y,x,y,c): -+ c.append(f"Q{c1x:g},{c1y:g} {x:g},{y:g}") -+ def close_path(c): -+ c.append("Z") -+ - funcs.set_move_to_func(move_to) - 
funcs.set_line_to_func(line_to) - funcs.set_cubic_to_func(cubic_to) - funcs.set_quadratic_to_func(quadratic_to) - funcs.set_close_path_func(close_path) -- funcs.draw_glyph(opensans, 1, container) -+ with pytest.warns(DeprecationWarning): -+ funcs.draw_glyph(opensans, 1, container) - assert "".join(container) == "M1120,0L938,465L352,465L172,0L0,0L578,1468L721,1468L1296,0L1120,0ZM885,618L715,1071Q682,1157 647,1282Q625,1186 584,1071L412,618L885,618Z" - - def test_draw_pen(self, opensans): diff --git a/print/scribus-devel/Makefile b/print/scribus-devel/Makefile index 24385f37329..2ebb373cf58 100644 --- a/print/scribus-devel/Makefile +++ b/print/scribus-devel/Makefile @@ -1,5 +1,6 @@ PORTNAME= scribus PORTVERSION= 1.5.8 +PORTREVISION= 1 CATEGORIES= print editors MASTER_SITES= SF/${PORTNAME}/${PORTNAME}${PKGNAMESUFFIX}/${PORTVERSION}/ \ http://www.scribus.net/downloads/${PORTVERSION}/ diff --git a/print/simple-fb2-reader/Makefile b/print/simple-fb2-reader/Makefile index 3da5f3f56c4..8c5eaee0eb7 100644 --- a/print/simple-fb2-reader/Makefile +++ b/print/simple-fb2-reader/Makefile @@ -2,6 +2,7 @@ PORTNAME= simple-fb2-reader PORTVERSION= 1.1.3 +PORTREVISION= 1 CATEGORIES= print MAINTAINER= danfe@FreeBSD.org diff --git a/print/xreader/Makefile b/print/xreader/Makefile index dbd481b0c08..f2e9038f8f1 100644 --- a/print/xreader/Makefile +++ b/print/xreader/Makefile @@ -2,7 +2,7 @@ PORTNAME= xreader PORTVERSION= 3.2.2 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= print gnome DIST_SUBDIR= gnome diff --git a/science/abinit/Makefile b/science/abinit/Makefile index 505cfc6cc1e..453b3ae51e1 100644 --- a/science/abinit/Makefile +++ b/science/abinit/Makefile @@ -2,7 +2,7 @@ PORTNAME= abinit DISTVERSION= 9.4.2 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= science MAINTAINER= yuri@FreeBSD.org diff --git a/science/afni/Makefile b/science/afni/Makefile index 95ae7c0a66f..762d3c81f3f 100644 --- a/science/afni/Makefile +++ b/science/afni/Makefile @@ -1,6 +1,7 @@ PORTNAME= afni 
DISTVERSIONPREFIX= AFNI_ DISTVERSION= 22.0.21 +PORTREVISION= 1 CATEGORIES= science biology graphics perl5 python MAINTAINER= fernape@FreeBSD.org diff --git a/science/atompaw/Makefile b/science/atompaw/Makefile index 3aa20f95cff..05300978ff6 100644 --- a/science/atompaw/Makefile +++ b/science/atompaw/Makefile @@ -1,5 +1,5 @@ PORTNAME= atompaw -DISTVERSION= 4.1.1.0 +DISTVERSION= 4.2.0.0 CATEGORIES= science MASTER_SITES= http://users.wfu.edu/natalie/papers/pwpaw/ diff --git a/science/atompaw/distinfo b/science/atompaw/distinfo index c4958735e76..e4b7b4c23f2 100644 --- a/science/atompaw/distinfo +++ b/science/atompaw/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1611957268 -SHA256 (atompaw-4.1.1.0.tar.gz) = b1ee2b53720066655d98523ef337e54850cb1e68b3a2da04ff5a1576d3893891 -SIZE (atompaw-4.1.1.0.tar.gz) = 5675066 +TIMESTAMP = 1648350246 +SHA256 (atompaw-4.2.0.0.tar.gz) = dde5bc216544180a44e0bbd77668feb741621fd3fb0a14bd98df721363710c62 +SIZE (atompaw-4.2.0.0.tar.gz) = 5822912 diff --git a/science/chemical-mime-data/Makefile b/science/chemical-mime-data/Makefile index efd990ec73f..0658e24faeb 100644 --- a/science/chemical-mime-data/Makefile +++ b/science/chemical-mime-data/Makefile @@ -2,7 +2,7 @@ PORTNAME= chemical-mime-data PORTVERSION= 0.1.94 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= science MASTER_SITES= SF/chemical-mime/${PORTNAME}/${PORTVERSION} diff --git a/science/code_saturne/Makefile b/science/code_saturne/Makefile index 664246fd001..d8c066a23fb 100644 --- a/science/code_saturne/Makefile +++ b/science/code_saturne/Makefile @@ -2,7 +2,7 @@ PORTNAME= code_saturne PORTVERSION= 7.1.0 -PORTREVISION= 2 +PORTREVISION= 3 DISTVERSIONPREFIX= v CATEGORIES= science parallel diff --git a/science/fleur/Makefile b/science/fleur/Makefile index 0d9a30e747a..f41a94f62ef 100644 --- a/science/fleur/Makefile +++ b/science/fleur/Makefile @@ -1,5 +1,6 @@ PORTNAME= fleur DISTVERSION= 5.1.20220103 +PORTREVISION= 1 CATEGORIES= science # chemistry MAINTAINER= yuri@FreeBSD.org diff --git 
a/science/gchemutils/Makefile b/science/gchemutils/Makefile index 080b755a1f0..ad2d0d78f8f 100644 --- a/science/gchemutils/Makefile +++ b/science/gchemutils/Makefile @@ -2,7 +2,7 @@ PORTNAME= gchemutils PORTVERSION= 0.14.16 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= science MASTER_SITES= SAVANNAH/${PORTNAME}/${PORTVERSION:R} DISTNAME= gnome-chemistry-utils-${PORTVERSION} diff --git a/science/ghemical/Makefile b/science/ghemical/Makefile index a80ee062afc..eb1472ffd4f 100644 --- a/science/ghemical/Makefile +++ b/science/ghemical/Makefile @@ -2,7 +2,7 @@ PORTNAME= ghemical PORTVERSION= 3.0.0 -PORTREVISION= 21 +PORTREVISION= 22 CATEGORIES= science MASTER_SITES= http://bioinformatics.org/ghemical/download/%SUBDIR%/ MASTER_SITE_SUBDIR= release20111012 current diff --git a/science/ghmm/Makefile b/science/ghmm/Makefile index bf051a511dd..727ff15e8b8 100644 --- a/science/ghmm/Makefile +++ b/science/ghmm/Makefile @@ -2,6 +2,7 @@ PORTNAME= ghmm DISTVERSION= 0.9-rc3 +PORTREVISION= 1 CATEGORIES= science math MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTNAME}%20${DISTVERSION} diff --git a/science/gromacs/Makefile b/science/gromacs/Makefile index 353ece71697..36e71feef0e 100644 --- a/science/gromacs/Makefile +++ b/science/gromacs/Makefile @@ -2,6 +2,7 @@ PORTNAME= gromacs DISTVERSION= 2021.5 +PORTREVISION= 1 CATEGORIES= science MASTER_SITES= ftp://ftp.gromacs.org/pub/gromacs/ diff --git a/science/lammps/Makefile b/science/lammps/Makefile index 4d3ddd9d7c8..e14688e7bce 100644 --- a/science/lammps/Makefile +++ b/science/lammps/Makefile @@ -1,5 +1,6 @@ PORTNAME= lammps PORTVERSION= ${GH_TAGNAME:C/(stable|patch)_([0-9]{1,2})([A-Z][a-z][a-z])([0-9]{4})/\4.\3.\2/S/Jan/01/S/Feb/02/S/Mar/03/S/Apr/04/S/May/05/S/Jun/06/S/Jul/07/S/Aug/08/S/Sep/09/S/Oct/10/S/Nov/11/S/Dec/12/} +PORTREVISION= 1 CATEGORIES= science MAINTAINER= yuri@FreeBSD.org @@ -19,13 +20,13 @@ LIB_DEPENDS= libevent.so:devel/libevent \ libssh2.so:security/libssh2 \ libzstd.so:archivers/zstd -USES= blaslapack:openblas 
cmake:noninja compiler:c++14-lang eigen:3 fortran gnome localbase:ldflags python:build shebangfix +USES= blaslapack:openblas cmake:noninja compiler:c++14-lang eigen:3 fortran gnome localbase:ldflags python shebangfix USE_GNOME= libxml2 USE_LDCONFIG= yes USE_PYTHON= cython USE_GITHUB= yes -GH_TAGNAME= patch_7Jan2022 +GH_TAGNAME= patch_24Mar2022 SHEBANG_GLOB= *.sh *.bash SHEBANG_FILES= lib/kokkos/bin/nvcc_wrapper @@ -127,14 +128,19 @@ PORTSCOUT= ignore:1 # due to a special version tagging scheme that this project CMAKE_ARGS+= -DBUILD_OMP:BOOL=true .endif -post-patch: +xpost-patch: # adjust SHELL in Makefile @${REINPLACE_CMD} 's|SHELL = /bin/bash|SHELL = ${LOCALBASE}/bin/bash|' ${WRKSRC}/src/Makefile # fix python version, also see https://github.com/lammps/lammps/issues/3098, https://github.com/lammps/lammps/issues/3099 @${FIND} ${WRKSRC} -name "CMakeLists.txt" -o -name "*.cmake" | ${XARGS} ${REINPLACE_CMD} -e ' \ - s|find_package(Python |find_package(Python $${FREEBSD_PYTHON_VER} EXACT |; \ - s|find_package(Python3 3.6 |find_package(Python3 |; \ - s|find_package(Python3 |find_package(Python3 $${FREEBSD_PYTHON_VER} EXACT | \ + s|find_package(Python3 REQUIRED |find_package(Python $${FREEBSD_PYTHON_VER} EXACT REQUIRED |; \ + s|find_package(Python 3.5 COMPONENTS |find_package(Python $${FREEBSD_PYTHON_VER} EXACT COMPONENTS |; \ + s|find_package(Python 3.6 COMPONENTS |find_package(Python $${FREEBSD_PYTHON_VER} EXACT COMPONENTS |; \ + s|find_package(Python3 REQUIRED |find_package(Python $${FREEBSD_PYTHON_VER} EXACT REQUIRED |; \ + s|find_package(Python COMPONENTS |find_package(Python3 $${FREEBSD_PYTHON_VER} EXACT COMPONENTS |; \ + s|find_package(Python3 COMPONENTS |find_package(Python3 $${FREEBSD_PYTHON_VER} EXACT COMPONENTS |; \ + s|find_package(Python QUIET COMPONENTS |find_package(Python3 $${FREEBSD_PYTHON_VER} EXACT QUIET COMPONENTS |; \ + s|find_package(Python REQUIRED COMPONENTS |find_package(Python3 $${FREEBSD_PYTHON_VER} EXACT REQUIRED COMPONENTS | \ ' 
post-patch-FFMPEG-off: diff --git a/science/lammps/distinfo b/science/lammps/distinfo index 2725cf434b0..7ee4a58c93f 100644 --- a/science/lammps/distinfo +++ b/science/lammps/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643126318 -SHA256 (lammps-lammps-2022.01.7-patch_7Jan2022_GH0.tar.gz) = fbf6c6814968ae0d772d7b6783079ff4f249a8faeceb39992c344969e9f1edbb -SIZE (lammps-lammps-2022.01.7-patch_7Jan2022_GH0.tar.gz) = 121770953 +TIMESTAMP = 1648168879 +SHA256 (lammps-lammps-2022.03.24-patch_24Mar2022_GH0.tar.gz) = d791cc93eedfc345fdf87bfa5b6f7e17e461f86ba197f9e9c3d477ce8657a7ef +SIZE (lammps-lammps-2022.03.24-patch_24Mar2022_GH0.tar.gz) = 117161592 diff --git a/science/lammps/pkg-plist b/science/lammps/pkg-plist index 433033605f3..64a280d1f99 100644 --- a/science/lammps/pkg-plist +++ b/science/lammps/pkg-plist @@ -77,6 +77,7 @@ man/man1/lmp.1.gz %%DATADIR%%/potentials/CH.airebo %%DATADIR%%/potentials/CH.airebo-m %%DATADIR%%/potentials/CH.rebo +%%DATADIR%%/potentials/CHAu.ILP %%DATADIR%%/potentials/CH_taper.KC %%DATADIR%%/potentials/CdTe.bop.table %%DATADIR%%/potentials/CdTe.sw @@ -145,6 +146,7 @@ man/man1/lmp.1.gz %%DATADIR%%/potentials/Mo5.2.mgpt.README %%DATADIR%%/potentials/Mo5.2.mgpt.parmin %%DATADIR%%/potentials/Mo5.2.mgpt.potin +%%DATADIR%%/potentials/MoS2.ILP %%DATADIR%%/potentials/Mo_Chen_PRM2017.snap %%DATADIR%%/potentials/Mo_Chen_PRM2017.snapcoeff %%DATADIR%%/potentials/Mo_Chen_PRM2017.snapparam diff --git a/science/openbabel/Makefile b/science/openbabel/Makefile index bc51643fb6b..6eb08df69bd 100644 --- a/science/openbabel/Makefile +++ b/science/openbabel/Makefile @@ -2,7 +2,7 @@ PORTNAME= openbabel DISTVERSION= 3.1.1 -PORTREVISION= 2 +PORTREVISION= 3 DISTVERSIONSUFFIX= -source CATEGORIES= science MASTER_SITES= https://github.com/openbabel/openbabel/releases/download/openbabel-${PORTVERSION:C/\./-/g}/ diff --git a/science/orthanc/Makefile b/science/orthanc/Makefile index dc07dff3912..612f4f8ea43 100644 --- a/science/orthanc/Makefile +++ b/science/orthanc/Makefile 
@@ -2,7 +2,7 @@ PORTNAME= orthanc DISTVERSION= 1.9.7 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= science MASTER_SITES= https://www.orthanc-server.com/downloads/get.php?path=/orthanc/ DISTNAME= Orthanc-${PORTVERSION} diff --git a/science/py-asdf-standard/Makefile b/science/py-asdf-standard/Makefile index 50a74c24674..7b92ab75a08 100644 --- a/science/py-asdf-standard/Makefile +++ b/science/py-asdf-standard/Makefile @@ -13,7 +13,7 @@ COMMENT= ASDF Standard schemas LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE -#BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}setuptools_scm>=0:devel/py-setuptools_scm@${PY_FLAVOR} +BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}setuptools_scm>=0:devel/py-setuptools_scm@${PY_FLAVOR} USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils diff --git a/science/py-asdf/Makefile b/science/py-asdf/Makefile index 7b19bea743f..7c3ceef0a19 100644 --- a/science/py-asdf/Makefile +++ b/science/py-asdf/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= asdf -PORTVERSION= 2.10.0 +PORTVERSION= 2.10.1 CATEGORIES= science python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,8 +13,8 @@ LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}setuptools_scm>=0:devel/py-setuptools_scm@${PY_FLAVOR} -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}asdf-standard>=1.0.0:science/py-asdf-standard@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}asdf-transform-schemas>=0.2.0:science/py-asdf-transform-schemas@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}asdf-standard>=1.0.1:science/py-asdf-standard@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}asdf-transform-schemas>=0.2.2:science/py-asdf-transform-schemas@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}jmespath>=0.6.2:devel/py-jmespath@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}jsonschema3>=3.0.2<4:devel/py-jsonschema3@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}lz4>=0.10:archivers/py-lz4@${PY_FLAVOR} \ @@ -23,13 +23,11 @@ RUN_DEPENDS= 
${PYTHON_PKGNAMEPREFIX}asdf-standard>=1.0.0:science/py-asdf-standar ${PYTHON_PKGNAMEPREFIX}yaml>=3.10:devel/py-yaml@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}semantic-version>=2.8:devel/py-semantic-version@${PY_FLAVOR} -USES= python:3.7+ #shebangfix +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes -#SHEBANG_FILES= asdf/extern/RangeHTTPServer.py - .include .if ${PYTHON_REL} < 30900 diff --git a/science/py-asdf/distinfo b/science/py-asdf/distinfo index c6e7e66fb90..1f82661742c 100644 --- a/science/py-asdf/distinfo +++ b/science/py-asdf/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058108 -SHA256 (asdf-2.10.0.tar.gz) = a838db39b2ef51d586f909625c6de3b0bf28c0323525107601a7a475b0bc388e -SIZE (asdf-2.10.0.tar.gz) = 448685 +TIMESTAMP = 1647264656 +SHA256 (asdf-2.10.1.tar.gz) = f7e569f29b3723939efec8164eb2ed7274bdd480b0b283d75833f0f59d108409 +SIZE (asdf-2.10.1.tar.gz) = 446711 diff --git a/science/py-cirq-google/Makefile b/science/py-cirq-google/Makefile index 09674df38ae..70604474735 100644 --- a/science/py-cirq-google/Makefile +++ b/science/py-cirq-google/Makefile @@ -1,6 +1,6 @@ PORTNAME= cirq-google DISTVERSIONPREFIX= v -DISTVERSION= 0.13.0 +DISTVERSION= 0.14.0 CATEGORIES= science python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/science/py-cirq-google/distinfo b/science/py-cirq-google/distinfo index 0e389d477fb..913605cbe73 100644 --- a/science/py-cirq-google/distinfo +++ b/science/py-cirq-google/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634484752 -SHA256 (quantumlib-Cirq-v0.13.0_GH0.tar.gz) = 5f4890fea5439091e73ddcf73a0e465c5c1e7242d9a4b725a0242cd55e6df278 -SIZE (quantumlib-Cirq-v0.13.0_GH0.tar.gz) = 8002392 +TIMESTAMP = 1648350389 +SHA256 (quantumlib-Cirq-v0.14.0_GH0.tar.gz) = ad0e7fa9aa311eeca2ceee4c938950e0d9de69d1720ef08164245874c8567ea9 +SIZE (quantumlib-Cirq-v0.14.0_GH0.tar.gz) = 8637424 diff --git a/science/py-eccodes/Makefile b/science/py-eccodes/Makefile index d6b96174ecc..27df603ee03 100644 --- a/science/py-eccodes/Makefile +++ 
b/science/py-eccodes/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= eccodes -PORTVERSION= 1.4.0 +PORTVERSION= 1.4.1 CATEGORIES= science python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/science/py-eccodes/distinfo b/science/py-eccodes/distinfo index 0fb89ba5f4e..3f74f4e5272 100644 --- a/science/py-eccodes/distinfo +++ b/science/py-eccodes/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641045996 -SHA256 (eccodes-1.4.0.tar.gz) = b737b75c48aaaa5bd72de4c3efdec6ace15ad15ca41451d4885dcd7654357380 -SIZE (eccodes-1.4.0.tar.gz) = 53698 +TIMESTAMP = 1647264658 +SHA256 (eccodes-1.4.1.tar.gz) = d5ef0642e3d51dedca7832d0fb44ad2b258d18a7d9fe3a2bb35755052d2383bc +SIZE (eccodes-1.4.1.tar.gz) = 54789 diff --git a/science/py-kliff/Makefile b/science/py-kliff/Makefile index 1b120499afc..fe41f2cd09b 100644 --- a/science/py-kliff/Makefile +++ b/science/py-kliff/Makefile @@ -1,5 +1,5 @@ PORTNAME= kliff -DISTVERSION= 0.3.1 +DISTVERSION= 0.3.3 CATEGORIES= science python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/science/py-kliff/distinfo b/science/py-kliff/distinfo index f586fbb632c..6aba931473e 100644 --- a/science/py-kliff/distinfo +++ b/science/py-kliff/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641248408 -SHA256 (kliff-0.3.1.tar.gz) = be9d436256a56697de9529eb0b5c52e6b6e1fa3788d89c66ce91a37a21265704 -SIZE (kliff-0.3.1.tar.gz) = 1900190 +TIMESTAMP = 1648347854 +SHA256 (kliff-0.3.3.tar.gz) = e144253734acbceb9c899c31fea90d9806ec2871d9c68198b253978f15d17d75 +SIZE (kliff-0.3.3.tar.gz) = 1969797 diff --git a/science/py-libpysal/Makefile b/science/py-libpysal/Makefile index 62b51d00128..2e785244f82 100644 --- a/science/py-libpysal/Makefile +++ b/science/py-libpysal/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= libpysal -PORTVERSION= 4.6.0 +PORTVERSION= 4.6.2 CATEGORIES= science python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/science/py-libpysal/distinfo 
b/science/py-libpysal/distinfo index 0bed6b2c981..4d5d4ede7d8 100644 --- a/science/py-libpysal/distinfo +++ b/science/py-libpysal/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133791 -SHA256 (libpysal-4.6.0.tar.gz) = 8a1fa713b7b9d7a037f6ce2ae5b9a72fc86b2da219994ca76d5ac439358987d0 -SIZE (libpysal-4.6.0.tar.gz) = 2395812 +TIMESTAMP = 1647264660 +SHA256 (libpysal-4.6.2.tar.gz) = 8a4c4651394aefc6332f2fb1f38336c559e50dc89f977bfaa3d8541610eaa634 +SIZE (libpysal-4.6.2.tar.gz) = 2396982 diff --git a/science/py-mdp/files/patch-mdp__configuration.py b/science/py-mdp/files/patch-mdp__configuration.py index 1437060584a..3a15968aa3b 100644 --- a/science/py-mdp/files/patch-mdp__configuration.py +++ b/science/py-mdp/files/patch-mdp__configuration.py @@ -1,11 +1,11 @@ ---- mdp/configuration.py.orig 2016-04-10 17:12:43 UTC +--- mdp/configuration.py.orig 2016-03-04 08:39:38 UTC +++ mdp/configuration.py @@ -388,7 +388,7 @@ def set_configuration(): try: import svm as libsvm libsvm.libsvm - except ImportError as exc: -+ except (ImportError, OSError) as exc: ++ except (ImportError, OSError) as exc: config.ExternalDepFailed('libsvm', exc) except AttributeError as exc: config.ExternalDepFailed('libsvm', 'libsvm version >= 2.91 required') diff --git a/science/py-oddt/Makefile b/science/py-oddt/Makefile index 09450612a8e..2cda6e2cb6c 100644 --- a/science/py-oddt/Makefile +++ b/science/py-oddt/Makefile @@ -1,5 +1,6 @@ PORTNAME= oddt DISTVERSION= 0.7 +PORTREVISION= 1 CATEGORIES= science python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -9,13 +10,14 @@ COMMENT= Open Drug Discovery Toolkit LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE -BUILD_DEPENDS= ${PYNUMPY} \ +PY_DEPENDS= ${PYNUMPY} \ ${PYTHON_PKGNAMEPREFIX}joblib>=0.9.4:devel/py-joblib@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pandas>=0.19.2:math/py-pandas@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}scikit-learn>=0.18:science/py-scikit-learn@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}scipy>=0.17:science/py-scipy@${PY_FLAVOR} \ 
${PYTHON_PKGNAMEPREFIX}six>0:devel/py-six@${PY_FLAVOR} -RUN_DEPENDS:= ${BUILD_DEPENDS} +BUILD_DEPENDS= ${PY_DEPENDS} +RUN_DEPENDS= ${PY_DEPENDS} USES= python:3.7+ USE_GITHUB= yes diff --git a/science/py-oddt/files/patch-setup.py b/science/py-oddt/files/patch-setup.py new file mode 100644 index 00000000000..ce95f655591 --- /dev/null +++ b/science/py-oddt/files/patch-setup.py @@ -0,0 +1,13 @@ +- same patch as suggested here: https://github.com/oddt/oddt/issues/160 + +--- setup.py.orig 2022-03-25 17:16:33 UTC ++++ setup.py +@@ -9,7 +9,7 @@ setup(name='oddt', + author_email='mwojcikowski@ibb.waw.pl', + url='https://github.com/oddt/oddt', + license='BSD', +- packages=find_packages(), ++ packages=find_packages(exclude=['test*']), + package_data={'oddt.scoring.functions': ['NNScore/*.csv', + 'RFScore/*.csv', + 'PLECscore/*.json', diff --git a/science/py-paida/Makefile b/science/py-paida/Makefile index 486a046b573..c1c3a1ed2af 100644 --- a/science/py-paida/Makefile +++ b/science/py-paida/Makefile @@ -13,7 +13,7 @@ BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}tkinter>0:x11-toolkits/py-tkinter@${PY_FLA MAINTAINER= ports@FreeBSD.org COMMENT= Pure Python scientific analysis package -USES= python:3.6+ +USES= dos2unix python:3.6+ USE_PYTHON= distutils autoplist .include diff --git a/science/py-paida/files/patch-2to3 b/science/py-paida/files/patch-2to3 new file mode 100644 index 00000000000..8d31edcad6f --- /dev/null +++ b/science/py-paida/files/patch-2to3 @@ -0,0 +1,2038 @@ +--- paida/math/array/matrix.py.orig 2022-03-18 21:34:04 UTC ++++ paida/math/array/matrix.py +@@ -12,8 +12,8 @@ class matrix: + if hasattr(data, '__iter__'): + if not hasattr(data[0], '__iter__'): + data = [data] +- self._indicesR = range(len(data)) +- self._indicesC = range(len(data[0])) ++ self._indicesR = list(range(len(data))) ++ self._indicesC = list(range(len(data[0]))) + self.data = copy.deepcopy(data) + + def _createCopyLinked(self, indicesR, indicesC): +@@ -35,7 +35,7 @@ class matrix: + return matrix(data = 
result) + + def _format(self, data): +- return `data` ++ return repr(data) + + def __str__(self): + if len(self._indicesR) == 1: +@@ -574,7 +574,7 @@ class matrix: + V, H, ort = self._orthes(V, H, ort) + d, e, V, H = self._hqr2(d, e, V, H) + +- eigenvalues = zip(d, e) ++ eigenvalues = list(zip(d, e)) + eigenvectors = [] + for j in range(n): + eigenvector = [] +--- paida/math/optimize/pyoptimize.py.orig 2022-03-18 21:34:04 UTC ++++ paida/math/optimize/pyoptimize.py +@@ -55,7 +55,7 @@ def _constraint(evaluatorParameterSpace, constraints): + for constraint in constraints: + ### Jython2.1 doesn't understand exec(code, globals(), locals()) properly. + #eval(constraint, _normalNameSpace, evaluatorParameterSpace) +- exec constraint in _normalNameSpace, evaluatorParameterSpace ++ exec(constraint, _normalNameSpace, evaluatorParameterSpace) + + def copyEvaluatorParameterSpace(evaluatorParameterSpace): + newEvaluatorParameterSpace = evaluatorParameterSpace.copy() +@@ -261,7 +261,7 @@ def fmin_ncg(evaluatorValue, evaluatorGradient, evalua + break + + ### Compute a search direction by applying the CG method. +- gradient = map(evaluatorGradient, [evaluatorParameterSpace] * nFreeParameters, freeIndices) ++ gradient = list(map(evaluatorGradient, [evaluatorParameterSpace] * nFreeParameters, freeIndices)) + maggrad = 0.0 + psupi = [] + dri0 = 0.0 +@@ -364,16 +364,16 @@ def fmin_ncg(evaluatorValue, evaluatorGradient, evalua + warnflag = 1 + mesg = "Maximum number of iterations has been exceeded." + if display: +- print mesg +- print "\tCurrent function value: %f" % fval +- print "\tIterations: %d" % nIterations ++ print(mesg) ++ print("\tCurrent function value: %f" % fval) ++ print("\tIterations: %d" % nIterations) + else: + warnflag = 0 + mesg = "Optimization terminated successfully." 
+ if display: +- print mesg +- print "\tCurrent function value: %f" % fval +- print "\tIterations: %d" % nIterations ++ print(mesg) ++ print("\tCurrent function value: %f" % fval) ++ print("\tIterations: %d" % nIterations) + + return fval, hessian, warnflag, mesg + +@@ -492,7 +492,7 @@ def geneticAlgorithm(evaluatorValue, evaluatorGradient + island2.terminate() + fval, hessian, warnflag, mesg = result + if display: +- print mesg ++ print(mesg) + threadEvaluatorParameterSpace = island.getEvaluatorParameterSpace() + _i2o(threadEvaluatorParameterSpace, limits, freeIndices, freeParameterNames) + updateEvaluatorParameterSpace(threadEvaluatorParameterSpace, evaluatorParameterSpace) +@@ -514,7 +514,7 @@ def geneticAlgorithm(evaluatorValue, evaluatorGradient + warnflag = 1 + mesg = "Maximum number of iterations has been exceeded." + if display: +- print mesg ++ print(mesg) + threadEvaluatorParameterSpace = island.getEvaluatorParameterSpace() + _i2o(threadEvaluatorParameterSpace, limits, freeIndices, freeParameterNames) + updateEvaluatorParameterSpace(threadEvaluatorParameterSpace, evaluatorParameterSpace) +@@ -782,8 +782,8 @@ class _Island(threading.Thread): + islanders[migratorIndex] = migrators[i] + + if display: +- print self, generation +- print best, evaluationMinimum / ndf ++ print(self, generation) ++ print(best, evaluationMinimum / ndf) + + condition = migrationService.getCondition() + condition.acquire() +--- paida/math/pylapack/ilaenv.py.orig 2022-03-18 21:34:04 UTC ++++ paida/math/pylapack/ilaenv.py +@@ -211,7 +211,7 @@ def ilaenv(ispec, name, opts, n1, n2, n3, n4): + c3 = subnam[3:6] + c4 = c3[1:3] + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 3: + subnam = name.upper() +@@ -224,39 +224,39 @@ def ilaenv(ispec, name, opts, n1, n2, n3, n4): + c3 = subnam[3:6] + c4 = c3[1:3] + error = 'ilaenv(ispec = %d) is not implemented yet.' 
% ispec +- print error ++ print(error) + raise error + elif ispec == 4: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 5: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 6: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 7: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 8: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 9: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 10: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + elif ispec == 11: + error = 'ilaenv(ispec = %d) is not implemented yet.' % ispec +- print error ++ print(error) + raise error + else: + return -1 +--- paida/math/pylapack/pyblas/xerbla.py.orig 2022-03-18 21:34:04 UTC ++++ paida/math/pylapack/pyblas/xerbla.py +@@ -31,5 +31,5 @@ def xerbla(srname, info): + The position of the invalid parameter in the parameter list of the calling routine. 
+ """ + +- print ' ** On entry to %s parameter number %d had an illegal value' % (srname, info[0]) ++ print(' ** On entry to %s parameter number %d had an illegal value' % (srname, info[0])) + raise stop() +--- paida/paida_core/IBaseStyle.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IBaseStyle.py +@@ -6,7 +6,7 @@ import types + class _convertException: + def __init__(self, message=None): + if message != None: +- print message ++ print(message) + + class baseParameter: + def __init__(self, default): +@@ -39,11 +39,11 @@ class listParameter(baseParameter): + baseParameter.__init__(self, default) + + def convert(self, dataString): +- if isinstance(dataString, types.StringTypes): ++ if isinstance(dataString, (str,)): + return list(eval(dataString)) +- elif isinstance(dataString, types.ListType): ++ elif isinstance(dataString, list): + return dataString +- elif isinstance(dataString, types.TupleType): ++ elif isinstance(dataString, tuple): + return list(dataString) + else: + raise _convertException('The parameter was not converted to list type.') +@@ -216,7 +216,7 @@ class IBaseStyle: + return self._parameters[parameterName] + + def availableParameters(self): +- names = self._parameters.keys() ++ names = list(self._parameters.keys()) + names.sort() + return names + +@@ -262,5 +262,5 @@ class IBaseStyle: + + + import paida.paida_gui.PRoot +-if not locals().has_key('fontList'): ++if 'fontList' not in locals(): + fontList, defaultFont = paida.paida_gui.PRoot.getFontList(['Courier', 'courier']) +--- paida/paida_core/IFilter.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IFilter.py +@@ -28,7 +28,7 @@ class IFilter: + return bool(eval(self._code, self._globals, {'_row': self._tupleRows[self._tupleObject._rowIndex]})) + else: + if self._count >= self._rowsToProcess: +- raise IndexError, "Reached to the specified rowsToProcess." 
++ raise IndexError("Reached to the specified rowsToProcess.") + else: + self._count += 1 + return bool(eval(self._code, self._globals, {'_row': self._tupleRows[self._tupleObject._rowIndex]})) +--- paida/paida_core/IFitter.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IFitter.py +@@ -83,12 +83,12 @@ class IFitter: + return self._fitMethod + + def fitParameterSettings(self, name): +- if not self._fitParameterSettings.has_key(name): ++ if name not in self._fitParameterSettings: + self._fitParameterSettings[name] = IFitParameterSettings(name) + return self._fitParameterSettings[name] + + def listParameterSettings(self): +- return self._fitParameterSettings.keys() ++ return list(self._fitParameterSettings.keys()) + + def resetParameterSettings(self): + self._fitParameterSettings = {} +@@ -139,13 +139,13 @@ class IFitter: + self._checkFitType(fitData) + _function = data2 + guessed = False +- elif isinstance(data1, IFitData) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, IFitData) and isinstance(data2, (str,)) and (data3 == None): + fitData = data1 + self._checkFitType(fitData) + _functionFactory = IFunctionFactory(None) + _function = _functionFactory.createFunctionByName(data2, data2, inner = True) + guessed = False +- elif isinstance(data1, IFitData) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, IFitData) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = data1 + self._checkFitType(fitData) + _functionFactory = IFunctionFactory(None) +@@ -186,84 +186,84 @@ class IFitter: + fitData.create3DConnection(data1) + return self.fit(fitData, data2) + +- elif isinstance(data1, IHistogram1D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, IHistogram1D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create1DConnection(data1) + return self.fit(fitData, data2) +- elif 
isinstance(data1, IProfile1D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, IProfile1D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create1DConnection(data1) + return self.fit(fitData, data2) +- elif isinstance(data1, ICloud1D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, ICloud1D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create1DConnection(data1) + return self.fit(fitData, data2) +- elif isinstance(data1, IHistogram2D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, IHistogram2D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create2DConnection(data1) + return self.fit(fitData, data2) +- elif isinstance(data1, IProfile2D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, IProfile2D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create2DConnection(data1) + return self.fit(fitData, data2) +- elif isinstance(data1, ICloud2D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, ICloud2D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create2DConnection(data1) + return self.fit(fitData, data2) +- elif isinstance(data1, IHistogram3D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, IHistogram3D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create3DConnection(data1) + return self.fit(fitData, data2) +- elif isinstance(data1, ICloud3D) and isinstance(data2, types.StringTypes) and (data3 == None): ++ elif isinstance(data1, ICloud3D) and isinstance(data2, (str,)) and (data3 == None): + fitData = IFitData() + fitData.create3DConnection(data1) + return self.fit(fitData, data2) + +- elif isinstance(data1, 
IHistogram1D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, IHistogram1D) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create1DConnection(data1) + return self.fit(fitData, data2, data3) +- elif isinstance(data1, IProfile1D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, IProfile1D) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create1DConnection(data1) + return self.fit(fitData, data2, data3) +- elif isinstance(data1, ICloud1D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, ICloud1D) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create1DConnection(data1) + return self.fit(fitData, data2, data3) +- elif isinstance(data1, IHistogram2D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, IHistogram2D) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create2DConnection(data1) + return self.fit(fitData, data2, data3) +- elif isinstance(data1, IProfile2D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, IProfile2D) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create2DConnection(data1) + return self.fit(fitData, data2, data3) +- elif isinstance(data1, ICloud2D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, ICloud2D) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create2DConnection(data1) + return self.fit(fitData, data2, data3) +- elif isinstance(data1, IHistogram3D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, IHistogram3D) and 
isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create3DConnection(data1) + return self.fit(fitData, data2, data3) +- elif isinstance(data1, ICloud3D) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): ++ elif isinstance(data1, ICloud3D) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): + fitData = IFitData() + fitData.create3DConnection(data1) + return self.fit(fitData, data2, data3) + + elif isinstance(data1, IDataPointSet) and isinstance(data2, IFunction) and (data3 == None): +- indices = range(data1.dimension()) ++ indices = list(range(data1.dimension())) + fitData = IFitData() + fitData.createConnection(data1, indices[:-1], indices[-1]) + return self.fit(fitData, data2) +- elif isinstance(data1, IDataPointSet) and isinstance(data2, types.StringTypes) and (data3 == None): +- indices = range(data1.dimension()) ++ elif isinstance(data1, IDataPointSet) and isinstance(data2, (str,)) and (data3 == None): ++ indices = list(range(data1.dimension())) + fitData = IFitData() + fitData.createConnection(data1, indices[:-1], indices[-1]) + return self.fit(fitData, data2) +- elif isinstance(data1, IDataPointSet) and isinstance(data2, types.StringTypes) and hasattr(data3, '__iter__'): +- indices = range(data1.dimension()) ++ elif isinstance(data1, IDataPointSet) and isinstance(data2, (str,)) and hasattr(data3, '__iter__'): ++ indices = list(range(data1.dimension())) + fitData = IFitData() + fitData.createConnection(data1, indices[:-1], indices[-1]) + return self.fit(fitData, data2, data3) +@@ -362,7 +362,7 @@ class IFitter: + raise RuntimeError() + + ### Verbose mode? 
+- if self._option.has_key('verbose'): ++ if 'verbose' in self._option: + if self._option['verbose'] == True: + verbose = True + else: +@@ -376,7 +376,7 @@ + elif engineName in ['SimpleGA', 'GA']: + minimum, hessian, warnflag, mesg = geneticAlgorithm(evaluatorValue, evaluatorGradient, evaluatorHessian, evaluatorParameterSpace, freeParameterNames, limits, constraints, freeIndices, fixedIndices, ndf, display = verbose) + else: +- raise RuntimeError, 'Unknown engine name:', engineName ++ raise RuntimeError('Unknown engine name: %s' % engineName) + resultValues = _function.parameters() + + ### Is valid? +@@ -540,7 +540,7 @@ class IFitter: + for parameterIndex, parameterName in enumerate(function.parameterNames()): + parameterValue = function.parameter(parameterName) + ### This parameter has any setting? +- if fitParameterSettings.has_key(parameterName): ++ if parameterName in fitParameterSettings: + setting = fitParameterSettings[parameterName] + ### This parameter is fixed or bound? + if setting.isFixed(): +--- paida/paida_core/IFunction.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IFunction.py +@@ -89,7 +89,7 @@ class IFunction: + codeletList.append(expression) + codeletList.append('catalog') + else: +- raise ValueError, 'Unknown typeName "%s".' % typeName ++ raise ValueError('Unknown typeName "%s".' % typeName) + self._codeletString = ':'.join(codeletList) + + def _getParentFactory(self): +@@ -146,12 +146,12 @@ class IFunction: + for constraint in constraints: + ### Jython2.1 doesn't understand exec(code, globals(), locals()) properly. 
+ #eval(constraint, _normalNameSpace, parameterNameSpace) +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fp = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + parameterNameSpace.update(currents) + parameterNameSpace[parameterName] -= eps2 + for constraint in constraints: +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fm = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + result = (fp - fm) / 2.0 / eps2 + parameterNameSpace.update(currents) +@@ -205,42 +205,42 @@ class IFunction: + + if parameterIndex1 == parameterIndex2: + for constraint in constraints: +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fc = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + parameterNameSpace.update(currents) + parameterNameSpace[parameterName1] += eps + for constraint in constraints: +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fp = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + parameterNameSpace.update(currents) + parameterNameSpace[parameterName1] -= eps + for constraint in constraints: +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fm = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + result = (fp + fm - 2.0 * fc) / eps**2 + else: + parameterNameSpace[parameterName1] += eps + parameterNameSpace[parameterName2] += eps + for constraint in constraints: +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fpp = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + parameterNameSpace.update(currents) + parameterNameSpace[parameterName1] -= eps + parameterNameSpace[parameterName2] -= eps + for constraint in constraints: +- exec 
constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fmm = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + parameterNameSpace.update(currents) + parameterNameSpace[parameterName1] += eps + parameterNameSpace[parameterName2] -= eps + for constraint in constraints: +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fpm = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + parameterNameSpace.update(currents) + parameterNameSpace[parameterName1] -= eps + parameterNameSpace[parameterName2] += eps + for constraint in constraints: +- exec constraint in innerNameSpace, parameterNameSpace ++ exec(constraint, innerNameSpace, parameterNameSpace) + fmp = eval(compiledDeriv0, innerNameSpace, parameterNameSpace) + result = (fpp + fmm - fpm - fmp) / eps**2 / 4.0 + +--- paida/paida_core/IFunctionFactory.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IFunctionFactory.py +@@ -311,7 +311,7 @@ class IFunctionFactory: + + def _log(self, data): + if data._0 in self._zeros: +- raise ValueError, 'Called log(0.0).' ++ raise ValueError('Called log(0.0).') + elif data._1 in self._zeros: + _0 = 'log(%s)' % (data._0) + _1 = '0.0' +@@ -424,7 +424,7 @@ class IFunctionFactory: + evalNameSpace = {} + eval2NameSpace = {} + evalExpression = '' +- functionNameList = innerNameSpace.keys() ++ functionNameList = list(innerNameSpace.keys()) + try: + while 1: + item = parser.get_token() +@@ -515,7 +515,7 @@ class IFunctionFactory: + parser = _Shlex(expression) + evalNameSpace = {} + evalExpression = '' +- functionNameList = innerNameSpace.keys() ++ functionNameList = list(innerNameSpace.keys()) + try: + while 1: + item = parser.get_token() +@@ -561,28 +561,28 @@ class IFunctionFactory: + if item in functionNameList: + item2 = parser.get_token() + if item2 != '.': +- raise RuntimeError, 'Expected "." but "%s".' % (item2) ++ raise RuntimeError('Expected "." 
but "%s".' % (item2)) + item2 = parser.get_token() + if item2 != '_getDeriv1': +- raise RuntimeError, 'Expected "_getDeriv1" but "%s".' % (item2) ++ raise RuntimeError('Expected "_getDeriv1" but "%s".' % (item2)) + item2 = parser.get_token() + if item2 != '(': +- raise RuntimeError, 'Expected "(" but "%s".' % (item2) ++ raise RuntimeError('Expected "(" but "%s".' % (item2)) + item2 = parser.get_token() + if item2 != '_parameterNameSpace_': +- raise RuntimeError, 'Expected "_parameterNameSpace_" but "%s".' % (item2) ++ raise RuntimeError('Expected "_parameterNameSpace_" but "%s".' % (item2)) + item2 = parser.get_token() + if item2 != ',': +- raise RuntimeError, 'Expected "," but "%s".' % (item2) ++ raise RuntimeError('Expected "," but "%s".' % (item2)) + item2 = parser.get_token() + if item2 != ' ': +- raise RuntimeError, 'Expected " " but "%s".' % (item2) ++ raise RuntimeError('Expected " " but "%s".' % (item2)) + + deriv1Index = int(parser.get_token()) + + item2 = parser.get_token() + if item2 != ')': +- raise RuntimeError, 'Expected ")" but "%s".' % (item2) ++ raise RuntimeError('Expected ")" but "%s".' % (item2)) + + innerFunction = innerNameSpace[item] + if innerFunction._deriv1 == None: +@@ -624,7 +624,7 @@ class IFunctionFactory: + def createFunctionByName(self, path, expression, parameterNamePrefix = None, inner = False): + parameterNames, dimension = self._getParameterNamesByName(expression) + if parameterNames == None: +- raise ValueError, 'The expression contains unknown function name.' ++ raise ValueError('The expression contains unknown function name.') + newParameterNames = [] + for parameterName in parameterNames: + if parameterNamePrefix == None: +@@ -655,7 +655,7 @@ class IFunctionFactory: + if inner == False: + if self._catalog.add(name, newFunction) == False: + ### Catalogging failed. +- raise RuntimeError, 'Catalogging "%s" function failed.' % name ++ raise RuntimeError('Catalogging "%s" function failed.' 
% name) + self._tree._mkObject(path, newFunction) + return newFunction + +@@ -705,7 +705,7 @@ class IFunctionFactory: + if inner == False: + if self._catalog.add(name, newFunction) == False: + ### Catalogging failed. +- raise RuntimeError, 'Catalogging "%s" function failed.' % name ++ raise RuntimeError('Catalogging "%s" function failed.' % name) + self._tree._mkObject(path, newFunction) + return newFunction + +--- paida/paida_core/IPlotter.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IPlotter.py +@@ -61,7 +61,7 @@ class IPlotter: + return self._parameters[parameterName] + + def availableParameters(self): +- names = self._parameters.keys() ++ names = list(self._parameters.keys()) + names.sort() + return names + +@@ -222,22 +222,22 @@ class IPlotter: + y = self._tabY + w = 1.0 - 2 * x + h = 1.0 - 2 * y +- elif (type(x) == types.FloatType) and (y == None) and (w == None) and (h == None): ++ elif (type(x) == float) and (y == None) and (w == None) and (h == None): + x = float(x) + y = self._tabY + w = 1.0 - x - self._tabX + h = 1.0 - 2 * y +- elif (type(x) == types.FloatType) and (type(y) == types.FloatType) and (w == None) and (h == None): ++ elif (type(x) == float) and (type(y) == float) and (w == None) and (h == None): + x = float(x) + y = float(y) + w = 1.0 - x - self._tabX + h = 1.0 - y - self._tabY +- elif (type(x) == types.FloatType) and (type(y) == types.FloatType) and (type(w) == types.FloatType) and (h == None): ++ elif (type(x) == float) and (type(y) == float) and (type(w) == float) and (h == None): + x = float(x) + y = float(y) + w = float(w) + h = 1.0 - y - self._tabY +- elif (type(x) == types.FloatType) and (type(y) == types.FloatType) and (type(w) == types.FloatType) and (type(h) == types.FloatType): ++ elif (type(x) == float) and (type(y) == float) and (type(w) == float) and (type(h) == float): + x = float(x) + y = float(y) + w = float(w) +@@ -258,12 +258,12 @@ class IPlotter: + columns = 1 + rows = 1 + index = 0 +- elif (type(columns) == 
types.IntType) and (rows == None) and (index == None): ++ elif (type(columns) == int) and (rows == None) and (index == None): + rows = 1 + index = 0 +- elif (type(columns) == types.IntType) and (type(rows) == types.IntType) and (index == None): ++ elif (type(columns) == int) and (type(rows) == int) and (index == None): + index = 0 +- elif (type(columns) == types.IntType) and (type(rows) == types.IntType) and (type(index) == types.IntType): ++ elif (type(columns) == int) and (type(rows) == int) and (type(index) == int): + pass + else: + raise IllegalArgumentException() +@@ -294,7 +294,7 @@ class IPlotter: + else: + raise IllegalArgumentException() + +- def next(self): ++ def __next__(self): + self.setCurrentRegionNumber((self.currentRegionNumber() + 1) % self.numberOfRegions()) + return self.currentRegion() + +@@ -389,7 +389,7 @@ class IPlotter: + self._getGuiPlotter().setImageWrite(fileName, self._parameterData('landscape'), fileType) + + else: +- raise RuntimeError, 'Unknown GUI engine name "%s".' % engineName ++ raise RuntimeError('Unknown GUI engine name "%s".' % engineName) + + def _postScriptCreate(self, fileName): + ### Get postscript strings. 
+@@ -467,7 +467,7 @@ class IPlotter: + self._getGuiPlotter().setTitle(title) + + def setTitle(self, title): +- if type(title) in types.StringTypes: ++ if type(title) in (str,): + self._setWindowTitle(title) + tags = ['globalTitle'] + guiPlotter = self._getGuiPlotter() +--- paida/paida_core/IPlotterRegion.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IPlotterRegion.py +@@ -592,10 +592,10 @@ class IPlotterRegion: + elif (isinstance(data1, IHistogram1D)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, IHistogram1D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, IHistogram1D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, IHistogram1D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, IHistogram1D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -605,10 +605,10 @@ class IPlotterRegion: + elif (isinstance(data1, IHistogram2D)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, IHistogram2D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, IHistogram2D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, IHistogram2D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, IHistogram2D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -618,10 +618,10 @@ class IPlotterRegion: + elif (isinstance(data1, IHistogram3D)) and (isinstance(data2, 
IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, IHistogram3D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, IHistogram3D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, IHistogram3D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, IHistogram3D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -631,10 +631,10 @@ class IPlotterRegion: + elif (isinstance(data1, ICloud1D)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, ICloud1D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, ICloud1D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, ICloud1D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, ICloud1D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -644,10 +644,10 @@ class IPlotterRegion: + elif (isinstance(data1, ICloud2D)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, ICloud2D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, ICloud2D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, ICloud2D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, ICloud2D)) and (isinstance(data2, 
IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -657,10 +657,10 @@ class IPlotterRegion: + elif (isinstance(data1, ICloud3D)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, ICloud3D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, ICloud3D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, ICloud3D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, ICloud3D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -670,10 +670,10 @@ class IPlotterRegion: + elif (isinstance(data1, IProfile1D)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, IProfile1D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, IProfile1D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, IProfile1D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, IProfile1D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -683,10 +683,10 @@ class IPlotterRegion: + elif (isinstance(data1, IProfile2D)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, IProfile2D)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, IProfile2D)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + 
options = optionAnalyzer(data2) +- elif (isinstance(data1, IProfile2D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, IProfile2D)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -696,10 +696,10 @@ class IPlotterRegion: + elif (isinstance(data1, IFunction)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, IFunction)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, IFunction)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, IFunction)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, IFunction)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -709,10 +709,10 @@ class IPlotterRegion: + elif (isinstance(data1, IDataPointSet)) and (isinstance(data2, IPlotterStyle)) and (data3 == None): + plotterStyle = data2 + options = optionAnalyzer(None) +- elif (isinstance(data1, IDataPointSet)) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (isinstance(data1, IDataPointSet)) and (type(data2) in (str,)) and (data3 == None): + plotterStyle = self.style() + options = optionAnalyzer(data2) +- elif (isinstance(data1, IDataPointSet)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in types.StringTypes): ++ elif (isinstance(data1, IDataPointSet)) and (isinstance(data2, IPlotterStyle)) and (type(data3) in (str,)): + plotterStyle = data2 + options = optionAnalyzer(data3) + +@@ -816,7 +816,7 @@ class IPlotterRegion: + return False + + def _needReplace(self, options): +- if options.has_key('mode'): ++ if 'mode' in options: + if options['mode'] == 'replace': + 
return True + elif options['mode'] == 'overlay': +@@ -830,7 +830,7 @@ class IPlotterRegion: + def _needRescale(self, options): + if self._needReplace(options): + return False +- elif options.has_key('rescale'): ++ elif 'rescale' in options: + if options['rescale'] == True: + if self._getNItemData() == 1: + return False +@@ -868,63 +868,63 @@ class IPlotterRegion: + def _getOrderedBins2D(self, axisX, axisY, surfaceYZz, surfaceZXz): + if surfaceYZz < 0.0 and surfaceZXz < 0.0: + #0 +- binsX = range(axisX.bins() - 1, -1, -1) +- binsY = range(axisY.bins() - 1, -1, -1) ++ binsX = list(range(axisX.bins() - 1, -1, -1)) ++ binsY = list(range(axisY.bins() - 1, -1, -1)) + elif surfaceYZz < 0.0 and surfaceZXz >= 0.0: + #3 +- binsX = range(axisX.bins() - 1, -1, -1) +- binsY = range(axisY.bins()) ++ binsX = list(range(axisX.bins() - 1, -1, -1)) ++ binsY = list(range(axisY.bins())) + elif surfaceYZz >= 0.0 and surfaceZXz < 0.0: + #1 +- binsX = range(axisX.bins()) +- binsY = range(axisY.bins() - 1, -1, -1) ++ binsX = list(range(axisX.bins())) ++ binsY = list(range(axisY.bins() - 1, -1, -1)) + elif surfaceYZz >= 0.0 and surfaceZXz >= 0.0: + #2 +- binsX = range(axisX.bins()) +- binsY = range(axisY.bins()) ++ binsX = list(range(axisX.bins())) ++ binsY = list(range(axisY.bins())) + return binsX, binsY + + def _getOrderedBins3D(self, axisX, axisY, axisZ, surfaceXYz, surfaceYZz, surfaceZXz): + if surfaceXYz < 0.0 and surfaceYZz < 0.0 and surfaceZXz < 0.0: + #0 +- binsX = range(axisX.bins() - 1, -1, -1) +- binsY = range(axisY.bins() - 1, -1, -1) +- binsZ = range(axisZ.bins() - 1, -1, -1) ++ binsX = list(range(axisX.bins() - 1, -1, -1)) ++ binsY = list(range(axisY.bins() - 1, -1, -1)) ++ binsZ = list(range(axisZ.bins() - 1, -1, -1)) + elif surfaceXYz < 0.0 and surfaceYZz < 0.0 and surfaceZXz >= 0.0: + #3 +- binsX = range(axisX.bins() - 1, -1, -1) +- binsY = range(axisY.bins()) +- binsZ = range(axisZ.bins() - 1, -1, -1) ++ binsX = list(range(axisX.bins() - 1, -1, -1)) ++ binsY = 
list(range(axisY.bins())) ++ binsZ = list(range(axisZ.bins() - 1, -1, -1)) + elif surfaceXYz < 0.0 and surfaceYZz >= 0.0 and surfaceZXz < 0.0: + #1 +- binsX = range(axisX.bins()) +- binsY = range(axisY.bins() - 1, -1, -1) +- binsZ = range(axisZ.bins() - 1, -1, -1) ++ binsX = list(range(axisX.bins())) ++ binsY = list(range(axisY.bins() - 1, -1, -1)) ++ binsZ = list(range(axisZ.bins() - 1, -1, -1)) + elif surfaceXYz < 0.0 and surfaceYZz >= 0.0 and surfaceZXz >= 0.0: + #2 +- binsX = range(axisX.bins()) +- binsY = range(axisY.bins()) +- binsZ = range(axisZ.bins() - 1, -1, -1) ++ binsX = list(range(axisX.bins())) ++ binsY = list(range(axisY.bins())) ++ binsZ = list(range(axisZ.bins() - 1, -1, -1)) + elif surfaceXYz >= 0.0 and surfaceYZz < 0.0 and surfaceZXz < 0.0: + #4 +- binsX = range(axisX.bins() - 1, -1, -1) +- binsY = range(axisY.bins() - 1, -1, -1) +- binsZ = range(axisZ.bins()) ++ binsX = list(range(axisX.bins() - 1, -1, -1)) ++ binsY = list(range(axisY.bins() - 1, -1, -1)) ++ binsZ = list(range(axisZ.bins())) + elif surfaceXYz >= 0.0 and surfaceYZz < 0.0 and surfaceZXz >= 0.0: + #7 +- binsX = range(axisX.bins() - 1, -1, -1) +- binsY = range(axisY.bins()) +- binsZ = range(axisZ.bins()) ++ binsX = list(range(axisX.bins() - 1, -1, -1)) ++ binsY = list(range(axisY.bins())) ++ binsZ = list(range(axisZ.bins())) + elif surfaceXYz >= 0.0 and surfaceYZz >= 0.0 and surfaceZXz < 0.0: + #5 +- binsX = range(axisX.bins()) +- binsY = range(axisY.bins() - 1, -1, -1) +- binsZ = range(axisZ.bins()) ++ binsX = list(range(axisX.bins())) ++ binsY = list(range(axisY.bins() - 1, -1, -1)) ++ binsZ = list(range(axisZ.bins())) + elif surfaceXYz >= 0.0 and surfaceYZz >= 0.0 and surfaceZXz >= 0.0: + #6 +- binsX = range(axisX.bins()) +- binsY = range(axisY.bins()) +- binsZ = range(axisZ.bins()) ++ binsX = list(range(axisX.bins())) ++ binsY = list(range(axisY.bins())) ++ binsZ = list(range(axisZ.bins())) + return binsX, binsY, binsZ + + def _plotHistogram1D(self, item, plotterStyle, options): 
+@@ -973,7 +973,7 @@ class IPlotterRegion: + parameterShowMarkers = dataStyle._parameterData('showMarkers') + parameterShowErrorBars = dataStyle._parameterData('showErrorBars') + binsX = [axisX.UNDERFLOW_BIN] +- binsX.extend(range(axisX.bins())) ++ binsX.extend(list(range(axisX.bins()))) + binsX.append(axisX.OVERFLOW_BIN) + previousHeight = y1 + tempFillLineStyle = ILineStyle() +@@ -1057,7 +1057,7 @@ class IPlotterRegion: + parameterShowErrorBars = dataStyle._parameterData('showErrorBars') + + if parameterFormat == 'histogram': +- if options.has_key('nBinsX'): ++ if 'nBinsX' in options: + nBinsX = int(options['nBinsX']) + else: + nBinsX = 50 +@@ -1144,7 +1144,7 @@ class IPlotterRegion: + parameterShowMarkers = dataStyle._parameterData('showMarkers') + parameterShowErrorBars = dataStyle._parameterData('showErrorBars') + binsX = [axisX.UNDERFLOW_BIN] +- binsX.extend(range(axisX.bins())) ++ binsX.extend(list(range(axisX.bins()))) + binsX.append(axisX.OVERFLOW_BIN) + tempErrorLineStyle = ILineStyle() + if dataStyle._getCustomized('errorBarsColor'): +@@ -1299,13 +1299,13 @@ class IPlotterRegion: + bestLowerY, bestUpperY = self._getAxisValueRangeY() + else: + ### X range. +- if options.has_key('minX'): ++ if 'minX' in options: + bestLowerX = float(options['minX']) + elif self._getXLimits()[0] != None: + bestLowerX = self._getXLimits()[0] + else: + bestLowerX = -10.0 +- if options.has_key('maxX'): ++ if 'maxX' in options: + bestUpperX = float(options['maxX']) + elif self._getXLimits()[1] != None: + bestUpperX = self._getXLimits()[1] +@@ -1313,13 +1313,13 @@ class IPlotterRegion: + bestUpperX = 10.0 + + ### Y range. 
+- if options.has_key('minY'): ++ if 'minY' in options: + bestLowerY = float(options['minY']) + elif self._getYLimits()[0] != None: + bestLowerY = self._getYLimits()[0] + else: + bestLowerY = -10.0 +- if options.has_key('maxY'): ++ if 'maxY' in options: + bestUpperY = float(options['maxY']) + elif self._getYLimits()[1] != None: + bestUpperY = self._getYLimits()[1] +@@ -1336,11 +1336,11 @@ class IPlotterRegion: + convertX, convertY = self._getConvertersToCanvas() + + ### Plot. +- if options.has_key('minX'): ++ if 'minX' in options: + functionLowerX = float(options['minX']) + else: + functionLowerX = lowerX +- if options.has_key('maxX'): ++ if 'maxX' in options: + functionUpperX = float(options['maxX']) + else: + functionUpperX = upperX +@@ -1627,11 +1627,11 @@ class IPlotterRegion: + parameterShowErrorBars = dataStyle._parameterData('showErrorBars') + + if parameterFormat == 'histogram': +- if options.has_key('nBinsX'): ++ if 'nBinsX' in options: + nBinsX = int(options['nBinsX']) + else: + nBinsX = 50 +- if options.has_key('nBinsY'): ++ if 'nBinsY' in options: + nBinsY = int(options['nBinsY']) + else: + nBinsY = 50 +@@ -2071,13 +2071,13 @@ class IPlotterRegion: + bestLowerZ, bestUpperZ = self._getAxisValueRangeZ() + else: + ### X range. +- if options.has_key('minX'): ++ if 'minX' in options: + bestLowerX = float(options['minX']) + elif self._getXLimits()[0] != None: + bestLowerX = self._getXLimits()[0] + else: + bestLowerX = -10.0 +- if options.has_key('maxX'): ++ if 'maxX' in options: + bestUpperX = float(options['maxX']) + elif self._getXLimits()[1] != None: + bestUpperX = self._getXLimits()[1] +@@ -2085,13 +2085,13 @@ class IPlotterRegion: + bestUpperX = 10.0 + + ### Y range. 
+- if options.has_key('minY'): ++ if 'minY' in options: + bestLowerY = float(options['minY']) + elif self._getYLimits()[0] != None: + bestLowerY = self._getYLimits()[0] + else: + bestLowerY = -10.0 +- if options.has_key('maxY'): ++ if 'maxY' in options: + bestUpperY = float(options['maxY']) + elif self._getYLimits()[1] != None: + bestUpperY = self._getYLimits()[1] +@@ -2099,13 +2099,13 @@ class IPlotterRegion: + bestUpperY = 10.0 + + ### Z range. +- if options.has_key('minZ'): ++ if 'minZ' in options: + bestLowerZ = float(options['minZ']) + elif self._getZLimits()[0] != None: + bestLowerZ = self._getZLimits()[0] + else: + bestLowerZ = -10.0 +- if options.has_key('maxZ'): ++ if 'maxZ' in options: + bestUpperZ = float(options['maxZ']) + elif self._getZLimits()[1] != None: + bestUpperZ = self._getZLimits()[1] +@@ -2159,19 +2159,19 @@ class IPlotterRegion: + ticksY.sort() + ticksZ.sort() + +- if options.has_key('minX'): ++ if 'minX' in options: + bestLowerX = float(options['minX']) + else: + bestLowerX = lowerX +- if options.has_key('maxX'): ++ if 'maxX' in options: + bestUpperX = float(options['maxX']) + else: + bestUpperX = upperX +- if options.has_key('minY'): ++ if 'minY' in options: + bestLowerY = float(options['minY']) + else: + bestLowerY = lowerY +- if options.has_key('maxY'): ++ if 'maxY' in options: + bestUpperY = float(options['maxY']) + else: + bestUpperY = upperY +@@ -2499,15 +2499,15 @@ class IPlotterRegion: + parameterShowErrorBars = dataStyle._parameterData('showErrorBars') + + if parameterFormat == 'histogram': +- if options.has_key('nBinsX'): ++ if 'nBinsX' in options: + nBinsX = int(options['nBinsX']) + else: + nBinsX = 50 +- if options.has_key('nBinsY'): ++ if 'nBinsY' in options: + nBinsY = int(options['nBinsY']) + else: + nBinsY = 50 +- if options.has_key('nBinsZ'): ++ if 'nBinsZ' in options: + nBinsZ = int(options['nBinsZ']) + else: + nBinsZ = 50 +@@ -4452,7 +4452,7 @@ class IPlotterRegion: + spanT = 2.0 * factor + subDivider = 4.0 + else: +- 
raise RuntimeError, '%s:%s:%s:%s' % (rangeMin, rangeMax, scaling, axisType) ++ raise RuntimeError('%s:%s:%s:%s' % (rangeMin, rangeMax, scaling, axisType)) + + minI = int(floor(rangeMin / spanT)) + maxI = int(rangeMax / spanT) +@@ -4512,7 +4512,7 @@ class IPlotterRegion: + elif logLengthS == 9: + logSpan = 2 * logFactor + else: +- raise RuntimeError, '%s:%s:%s:%s' % (rangeMin, rangeMax, scaling, axisType) ++ raise RuntimeError('%s:%s:%s:%s' % (rangeMin, rangeMax, scaling, axisType)) + + current = logMinF + while (current <= logMax + pat): +@@ -4528,7 +4528,7 @@ class IPlotterRegion: + ticksSub.append(tickValue) + + else: +- raise RuntimeError, '%s:%s:%s:%s' % (rangeMin, rangeMax, scaling, axisType) ++ raise RuntimeError('%s:%s:%s:%s' % (rangeMin, rangeMax, scaling, axisType)) + + return tickLower, tickUpper, ticksMain, ticksSub + +@@ -4655,7 +4655,7 @@ class IPlotterRegion: + return self._parameters[parameterName] + + def availableParameters(self): +- names = self._parameters.keys() ++ names = list(self._parameters.keys()) + names.sort() + return names + +--- paida/paida_core/IRangeSet.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/IRangeSet.py +@@ -7,7 +7,7 @@ class InfinityBase: + def __abs__(self): + return InfinityPositive() + +- def __nonzero__(self): ++ def __bool__(self): + return True + + def __radd__(self, v): +@@ -20,11 +20,11 @@ class InfinityBase: + return self.__mul__(v) + + def __rdiv__(self, v): +- if type(v) == types.IntType: ++ if type(v) == int: + return 0 +- elif type(v) == types.LongType: +- return 0L +- elif type(v) == types.FloatType: ++ elif type(v) == int: ++ return 0 ++ elif type(v) == float: + return 0.0 + else: + raise RuntimeException("Inf cannot divide %s" % (type(v))) +@@ -79,22 +79,22 @@ class InfinityPositive(InfinityBase): + if v < 0: + raise RuntimeException("negative value power by positive Inf") + elif v == 0: +- if type(v) == types.IntType: ++ if type(v) == int: + return 0 +- elif type(v) == types.LongType: +- return 0L +- 
elif type(v) == types.FloatType: ++ elif type(v) == int: ++ return 0 ++ elif type(v) == float: + return 0.0 + else: + raise RuntimeException() + elif 0 < v < 1: + return 0.0 + elif v == 1: +- if type(v) == types.IntType: ++ if type(v) == int: + return 1 +- elif type(v) == types.LongType: +- return 1L +- elif type(v) == types.FloatType: ++ elif type(v) == int: ++ return 1 ++ elif type(v) == float: + return 1.0 + else: + raise RuntimeException() +@@ -168,7 +168,7 @@ class InfinityNegative(InfinityBase): + return InfinityNegative() + + def __pow__(self, v): +- if type(v) == types.IntType: ++ if type(v) == int: + if (v % 2) == 0: + return InfinityPositive() + else: +@@ -193,20 +193,20 @@ class InfinityNegative(InfinityBase): + elif 0 < v < 1: + return InfinityPositive() + elif v == 1: +- if type(v) == types.IntType: ++ if type(v) == int: + return 1 +- elif type(v) == types.LongType: +- return 1L +- elif type(v) == types.FloatType: ++ elif type(v) == int: ++ return 1 ++ elif type(v) == float: + return 1.0 + else: + raise RuntimeException() + elif 1 < v: +- if type(v) == types.IntType: ++ if type(v) == int: + return 0 +- elif type(v) == types.LongType: +- return 0L +- elif type(v) == types.FloatType: ++ elif type(v) == int: ++ return 0 ++ elif type(v) == float: + return 0.0 + else: + raise RuntimeException() +--- paida/paida_core/ITreeElementTree.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/ITreeElementTree.py +@@ -31,9 +31,9 @@ encoding = 'ISO-8859-1' + class _baseHandler(object): + directions = {'x': 0, 'y': 1, 'z': 2} + def evaluator(self, data): +- if data == u'OVERFLOW': ++ if data == 'OVERFLOW': + return IAxis.OVERFLOW_BIN +- elif data == u'UNDERFLOW': ++ elif data == 'UNDERFLOW': + return IAxis.UNDERFLOW_BIN + elif data == 'NaN': + return 0.0 +@@ -49,7 +49,7 @@ class _annotationHandler(_baseHandler): + ### Some implementations ommit annotation node when its content is empty. 
+ if element is not None: + for item in element.findall('item'): +- self.annotation._addItem(str(item.get('key')), str(item.get('value')), item.get('sticky') == u'true') ++ self.annotation._addItem(str(item.get('key')), str(item.get('value')), item.get('sticky') == 'true') + + + # in contrast to namedHandler, it is not assumed here that the +@@ -431,7 +431,7 @@ class _tupleHandler(_namedParser): + + def process_columns(self, fileIterator): + while 1: +- event, item = fileIterator.next() ++ event, item = next(fileIterator) + name = item.tag + if name == 'columns': + if event == 'start': +@@ -472,7 +472,7 @@ class _tupleHandler(_namedParser): + ### I think column will be always initialized to zero at (name == 'row and event == 'start')? + #column = 0 + while 1: +- event, item = fileIterator.next() ++ event, item = next(fileIterator) + name = item.tag + if name == 'entry' and event == 'end': + tuple._rowBuffer[column] = converters[column](item.get('value')) +@@ -508,7 +508,7 @@ class _cloud1dHandler(_namedParser): + fill = self.cloud1d.fill + self.entries1d = False + while 1: +- event, item = fileIterator.next() ++ event, item = next(fileIterator) + name = item.tag + if name == 'entries1d': + self.entries1d = True +@@ -555,7 +555,7 @@ class _cloud2dHandler(_namedParser): + + self.entries2d = False + while 1: +- event, item = fileIterator.next() ++ event, item = next(fileIterator) + name = item.tag + if name == 'entries2d': + self.entries2d = True +@@ -611,7 +611,7 @@ class _cloud3dHandler(_namedParser): + fill = self.cloud3d.fill + self.entries3d = False + while 1: +- event, item = fileIterator.next() ++ event, item = next(fileIterator) + name = item.tag + if name == 'entries3d': + self.entries3d = True +@@ -830,7 +830,7 @@ class _dataPointSetHandler(_namedHandler): + + def process_dataPoint(self, fileIterator, toBeCleared): + while 1: +- event, item = fileIterator.next() ++ event, item = next(fileIterator) + name = item.tag + if name == 'dataPoint': + if event == 
'start': +@@ -861,7 +861,7 @@ class _dataPointSetHandler(_namedHandler): + class _functionHandler(_namedHandler): + def __init__(self, element, functionFactory): + _namedHandler.__init__(self, element) +- self.isNormalized = (element.get('isNormalized') == u'true') ++ self.isNormalized = (element.get('isNormalized') == 'true') + codelet = self.process_codelet(element.find('codelet')) + self.function = functionFactory._createCopy(codelet, self.name, inner = False) + self.process_parameters(element.find('parameters')) +@@ -922,8 +922,8 @@ class ITree: + fileObj.read(1) + fileObj.seek(0) + except ImportError: +- print 'PAIDA: gzip module is unavailable.' +- print 'PAIDA: all files are treated as unzipped.' ++ print('PAIDA: gzip module is unavailable.') ++ print('PAIDA: all files are treated as unzipped.') + fileObj = file(fileName, 'r') + except IOError: + fileObj.close() +@@ -1447,9 +1447,9 @@ class ITree: + xmlEscape = self._xmlEscape + ### Base. + if object._option is None: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, `object.lowerEdge()`, `object.upperEdge()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, repr(object.lowerEdge()), repr(object.upperEdge()), footer)) + else: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), `object.lowerEdge()`, `object.upperEdge()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), repr(object.lowerEdge()), repr(object.upperEdge()), footer)) + + ### Annotation. + self._writeAnnotation(storeFile, object, header + '\t', footer) +@@ -1468,9 +1468,9 @@ class ITree: + xmlEscape = self._xmlEscape + ### Base. 
+ if object._option is None: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), footer)) + else: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), footer)) + + ### Annotation. + self._writeAnnotation(storeFile, object, header + '\t', footer) +@@ -1489,9 +1489,9 @@ class ITree: + xmlEscape = self._xmlEscape + ### Base. 
+ if object._option is None: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, `object.lowerEdgeZ()`, `object.upperEdgeZ()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), repr(object.lowerEdgeZ()), repr(object.upperEdgeZ()), footer)) + else: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, `object.lowerEdgeZ()`, `object.upperEdgeZ()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), repr(object.lowerEdgeZ()), repr(object.upperEdgeZ()), footer)) + + ### Annotation. 
+ self._writeAnnotation(storeFile, object, header + '\t', footer) +@@ -1549,10 +1549,10 @@ class ITree: + + for i in range(dimension): + axis = axes[i] +- storeFile.write('%s%s' % (header, directions[i], `axis.lowerEdge()`, `axis.upperEdge()`, axis.bins(), footer)) ++ storeFile.write('%s%s' % (header, directions[i], repr(axis.lowerEdge()), repr(axis.upperEdge()), axis.bins(), footer)) + if axis.isFixedBinning() == False: + for i in range(axis.bins() - 1): +- storeFile.write('%s\t%s' % (header, `axis.binUpperEdge(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(axis.binUpperEdge(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeStatistics(self, storeFile, object, header, footer): +@@ -1700,7 +1700,7 @@ class ITree: + def _writeParameters(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for parameterName in object.parameterNames(): +- storeFile.write('%s\t%s' % (header, self._xmlEscape(parameterName), `object.parameter(parameterName)`, footer)) ++ storeFile.write('%s\t%s' % (header, self._xmlEscape(parameterName), repr(object.parameter(parameterName)), footer)) + storeFile.write(header + '' + footer) + + def _writeDataPoint(self, storeFile, object, header, footer): +@@ -1709,25 +1709,25 @@ class ITree: + dataPoint = object.point(i) + for j in range(dataPoint.dimension()): + measurement = dataPoint.coordinate(j) +- storeFile.write('%s\t%s' % (header, `measurement.value()`, `measurement.errorPlus()`, `measurement.errorMinus()`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(measurement.value()), repr(measurement.errorPlus()), repr(measurement.errorMinus()), footer)) + storeFile.write(header + '' + footer) + + def _writeEntries1d(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for i in range(object.entries()): +- storeFile.write('%s\t%s' % (header, `object.value(i)`, `object.weight(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(object.value(i)), 
repr(object.weight(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeEntries2d(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for i in range(object.entries()): +- storeFile.write('%s\t%s' % (header, `object.valueX(i)`, `object.valueY(i)`, `object.weight(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(object.valueX(i)), repr(object.valueY(i)), repr(object.weight(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeEntries3d(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for i in range(object.entries()): +- storeFile.write('%s\t%s' % (header, `object.valueX(i)`, `object.valueY(i)`, `object.valueZ(i)`, `object.weight(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(object.valueX(i)), repr(object.valueY(i)), repr(object.valueZ(i)), repr(object.weight(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeColumns(self, storeFile, object, header, footer): +@@ -1751,7 +1751,7 @@ class ITree: + elif columnType in [PTypes.String, PTypes.Character]: + storeFile.write('%s\t%s' % (header, columnName, xmlEscape(columnDefault), columnType.TYPE, footer)) + else: +- storeFile.write('%s\t%s' % (header, columnName, `columnDefault`, columnType.TYPE, footer)) ++ storeFile.write('%s\t%s' % (header, columnName, repr(columnDefault), columnType.TYPE, footer)) + storeFile.write(header + '' + footer) + + def _columnWalker(self, expression, ituple): +@@ -1775,7 +1775,7 @@ class ITree: + elif columnType in [PTypes.String, PTypes.Character]: + expression += '%s %s="%s", ' % (columnType.TYPE, columnName, columnDefault) + else: +- expression += '%s %s=%s, ' % (columnType.TYPE, columnName, `columnDefault`) ++ expression += '%s %s=%s, ' % (columnType.TYPE, columnName, repr(columnDefault)) + if expression.endswith(', '): + return self._xmlEscape(expression[:-2] + '}') + else: +@@ -1835,7 +1835,7 @@ class ITree: + elif columnType in PTypesStringCharacter: + 
storeFileWrite('%s\t%s' % (header, xmlEscape(rowData[columnIndex]), footer)) + else: +- storeFileWrite('%s\t%s' % (header, `rowData[columnIndex]`, footer)) ++ storeFileWrite('%s\t%s' % (header, repr(rowData[columnIndex]), footer)) + storeFileWrite(xml_rowEnd) + + def _xmlEscape(self, data): +@@ -1851,7 +1851,7 @@ class ITree: + ### Store file creation. + try: + import gzip +- if self._options.has_key('compress'): ++ if 'compress' in self._options: + if self._options['compress']: + storeFile = gzip.open(self._fileName, 'w') + else: +@@ -1859,8 +1859,8 @@ class ITree: + else: + storeFile = gzip.open(self._fileName, 'w') + except ImportError: +- print 'PAIDA: gzip module is unavailable.' +- print 'PAIDA: all files are saved as unzipped.' ++ print('PAIDA: gzip module is unavailable.') ++ print('PAIDA: all files are saved as unzipped.') + storeFile = file(self._fileName, 'w') + + ### Initial strings. +--- paida/paida_core/ITreeOld.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/ITreeOld.py +@@ -26,7 +26,7 @@ import sys + import os + import xml.sax + import xml.sax.saxutils +-import StringIO ++import io + from math import sqrt + + encoding = 'ISO-8859-1' +@@ -45,7 +45,7 @@ class _handler_initial(xml.sax.handler.ContentHandler) + + class _handler_EntityResolver(xml.sax.handler.EntityResolver): + def resolveEntity(self, publicId, systemId): +- return StringIO.StringIO() ++ return io.StringIO() + + class _handler_base(xml.sax.handler.ContentHandler): + def __init__(self, parent): +@@ -54,9 +54,9 @@ class _handler_base(xml.sax.handler.ContentHandler): + self.parser = parent.parser + + def evaluator(self, data): +- if data == u'OVERFLOW': ++ if data == 'OVERFLOW': + return IAxis.OVERFLOW_BIN +- elif data == u'UNDERFLOW': ++ elif data == 'UNDERFLOW': + return IAxis.UNDERFLOW_BIN + elif data == 'NaN': + return 0.0 +@@ -237,7 +237,7 @@ class _handler_annotation(_handler_base): + class _handler_item(_handler_base): + def startElement(self, name, attributes): + try: +- if 
attributes['sticky'] == u'true': ++ if attributes['sticky'] == 'true': + self.parent.annotation._addItem(str(attributes['key']), str(attributes['value']), True) + else: + self.parent.annotation._addItem(str(attributes['key']), str(attributes['value']), False) +@@ -459,7 +459,7 @@ class _handler_bin1d_histogram1d(_handler_base): + error = self.evaluator(attributes['error']) + except KeyError: + error = sqrt(entries) +- if attributes.has_key('error2'): ++ if 'error2' in attributes: + raise RuntimeError('error2 is not supported in histograms.') + + try: +@@ -599,7 +599,7 @@ class _handler_bin2d_histogram2d(_handler_base): + error = self.evaluator(attributes['error']) + except KeyError: + error = sqrt(entries) +- if attributes.has_key('error2'): ++ if 'error2' in attributes: + raise RuntimeError('error2 is not supported in histograms.') + + try: +@@ -767,7 +767,7 @@ class _handler_bin3d_histogram3d(_handler_base): + error = self.evaluator(attributes['error']) + except KeyError: + error = sqrt(entries) +- if attributes.has_key('error2'): ++ if 'error2' in attributes: + raise RuntimeError('error2 is not supported in histograms.') + + try: +@@ -1548,7 +1548,7 @@ class _handler_bin1d_profile1d(_handler_base): + error = self.evaluator(attributes['error']) + except KeyError: + error = sqrt(entries) +- if attributes.has_key('error2'): ++ if 'error2' in attributes: + raise RuntimeError('error2 is not supported in histograms.') + + try: +@@ -1722,7 +1722,7 @@ class _handler_bin2d_profile2d(_handler_base): + error = self.evaluator(attributes['error']) + except KeyError: + error = sqrt(entries) +- if attributes.has_key('error2'): ++ if 'error2' in attributes: + raise RuntimeError('error2 is not supported in histograms.') + + try: +@@ -1922,8 +1922,8 @@ class _handler_function(_handler_base): + except KeyError: + self.path = '/' + +- if attributes.has_key('isNormalized'): +- if attributes['isNormalized'] == u'true': ++ if 'isNormalized' in attributes: ++ if 
attributes['isNormalized'] == 'true': + self.isNormalized = True + else: + self.isNormalized = False +@@ -2104,8 +2104,8 @@ class ITree: + fileObj.close() + fileObj = gzip.open(fileName) + except ImportError: +- print 'PAIDA: gzip module is unavailable.' +- print 'PAIDA: all files are treated as unzipped.' ++ print('PAIDA: gzip module is unavailable.') ++ print('PAIDA: all files are treated as unzipped.') + fileObj = file(fileName, 'r') + except IOError: + fileObj = file(fileName, 'r') +@@ -2124,9 +2124,9 @@ class ITree: + except xml.sax._exceptions.SAXParseException: + errorMessage = 'The file "%s" is broken. Stopped reading.' % fileName + if readOnly: +- raise IOError, errorMessage ++ raise IOError(errorMessage) + else: +- print errorMessage ++ print(errorMessage) + fileObj.close() + + def _setGuiTree(self, guiTree): +@@ -2644,9 +2644,9 @@ class ITree: + xmlEscape = self._xmlEscape + ### Base. + if object._option == None: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, `object.lowerEdge()`, `object.upperEdge()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, repr(object.lowerEdge()), repr(object.upperEdge()), footer)) + else: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), `object.lowerEdge()`, `object.upperEdge()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), repr(object.lowerEdge()), repr(object.upperEdge()), footer)) + + ### Annotation. + self._writeAnnotation(storeFile, object, header + '\t', footer) +@@ -2665,9 +2665,9 @@ class ITree: + xmlEscape = self._xmlEscape + ### Base. 
+ if object._option == None: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), footer)) + else: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), footer)) + + ### Annotation. + self._writeAnnotation(storeFile, object, header + '\t', footer) +@@ -2686,9 +2686,9 @@ class ITree: + xmlEscape = self._xmlEscape + ### Base. 
+ if object._option == None: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, `object.lowerEdgeZ()`, `object.upperEdgeZ()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), repr(object.lowerEdgeZ()), repr(object.upperEdgeZ()), footer)) + else: +- storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), `object.lowerEdgeX()`, `object.upperEdgeX()`, `object.lowerEdgeY()`, `object.upperEdgeY()`, `object.lowerEdgeZ()`, `object.upperEdgeZ()`, footer)) ++ storeFile.write('%s%s' % (header, xmlEscape(object._name), object.maxEntries(), xmlEscape(object.title()), path, xmlEscape(object._getOptionString()), repr(object.lowerEdgeX()), repr(object.upperEdgeX()), repr(object.lowerEdgeY()), repr(object.upperEdgeY()), repr(object.lowerEdgeZ()), repr(object.upperEdgeZ()), footer)) + + ### Annotation. 
+ self._writeAnnotation(storeFile, object, header + '\t', footer) +@@ -2746,10 +2746,10 @@ class ITree: + + for i in range(dimension): + axis = axes[i] +- storeFile.write('%s%s' % (header, directions[i], `axis.lowerEdge()`, `axis.upperEdge()`, axis.bins(), footer)) ++ storeFile.write('%s%s' % (header, directions[i], repr(axis.lowerEdge()), repr(axis.upperEdge()), axis.bins(), footer)) + if axis.isFixedBinning() == False: + for i in range(axis.bins() - 1): +- storeFile.write('%s\t%s' % (header, `axis.binUpperEdge(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(axis.binUpperEdge(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeStatistics(self, storeFile, object, header, footer): +@@ -2897,7 +2897,7 @@ class ITree: + def _writeParameters(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for parameterName in object.parameterNames(): +- storeFile.write('%s\t%s' % (header, self._xmlEscape(parameterName), `object.parameter(parameterName)`, footer)) ++ storeFile.write('%s\t%s' % (header, self._xmlEscape(parameterName), repr(object.parameter(parameterName)), footer)) + storeFile.write(header + '' + footer) + + def _writeDataPoint(self, storeFile, object, header, footer): +@@ -2906,25 +2906,25 @@ class ITree: + dataPoint = object.point(i) + for j in range(dataPoint.dimension()): + measurement = dataPoint.coordinate(j) +- storeFile.write('%s\t%s' % (header, `measurement.value()`, `measurement.errorPlus()`, `measurement.errorMinus()`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(measurement.value()), repr(measurement.errorPlus()), repr(measurement.errorMinus()), footer)) + storeFile.write(header + '' + footer) + + def _writeEntries1d(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for i in range(object.entries()): +- storeFile.write('%s\t%s' % (header, `object.value(i)`, `object.weight(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(object.value(i)), 
repr(object.weight(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeEntries2d(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for i in range(object.entries()): +- storeFile.write('%s\t%s' % (header, `object.valueX(i)`, `object.valueY(i)`, `object.weight(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(object.valueX(i)), repr(object.valueY(i)), repr(object.weight(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeEntries3d(self, storeFile, object, header, footer): + storeFile.write(header + '' + footer) + for i in range(object.entries()): +- storeFile.write('%s\t%s' % (header, `object.valueX(i)`, `object.valueY(i)`, `object.valueZ(i)`, `object.weight(i)`, footer)) ++ storeFile.write('%s\t%s' % (header, repr(object.valueX(i)), repr(object.valueY(i)), repr(object.valueZ(i)), repr(object.weight(i)), footer)) + storeFile.write(header + '' + footer) + + def _writeColumns(self, storeFile, object, header, footer): +@@ -2948,7 +2948,7 @@ class ITree: + elif columnType in [PTypes.String, PTypes.Character]: + storeFile.write('%s\t%s' % (header, columnName, xmlEscape(columnDefault), columnType.TYPE, footer)) + else: +- storeFile.write('%s\t%s' % (header, columnName, `columnDefault`, columnType.TYPE, footer)) ++ storeFile.write('%s\t%s' % (header, columnName, repr(columnDefault), columnType.TYPE, footer)) + storeFile.write(header + '' + footer) + + def _columnWalker(self, expression, ituple): +@@ -2972,7 +2972,7 @@ class ITree: + elif columnType in [PTypes.String, PTypes.Character]: + expression += '%s %s="%s", ' % (columnType.TYPE, columnName, columnDefault) + else: +- expression += '%s %s=%s, ' % (columnType.TYPE, columnName, `columnDefault`) ++ expression += '%s %s=%s, ' % (columnType.TYPE, columnName, repr(columnDefault)) + if expression.endswith(', '): + return self._xmlEscape(expression[:-2] + '}') + else: +@@ -3032,7 +3032,7 @@ class ITree: + elif columnType in PTypesStringCharacter: + 
storeFileWrite('%s\t%s' % (header, xmlEscape(rowData[columnIndex]), footer)) + else: +- storeFileWrite('%s\t%s' % (header, `rowData[columnIndex]`, footer)) ++ storeFileWrite('%s\t%s' % (header, repr(rowData[columnIndex]), footer)) + storeFileWrite(xml_rowEnd) + + def _xmlEscape(self, data): +@@ -3048,7 +3048,7 @@ class ITree: + ### Store file creation. + try: + import gzip +- if self._options.has_key('compress'): ++ if 'compress' in self._options: + if self._options['compress']: + storeFile = gzip.open(self._fileName, 'w') + else: +@@ -3056,8 +3056,8 @@ class ITree: + else: + storeFile = gzip.open(self._fileName, 'w') + except ImportError: +- print 'PAIDA: gzip module is unavailable.' +- print 'PAIDA: all files are saved as unzipped.' ++ print('PAIDA: gzip module is unavailable.') ++ print('PAIDA: all files are saved as unzipped.') + storeFile = file(self._fileName, 'w') + + ### Initial strings. +--- paida/paida_core/ITuple.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/ITuple.py +@@ -75,13 +75,13 @@ class ITuple(object): + + def skip(self, nRows): + if nRows < 0: +- raise ValueError, 'Must be positive.' ++ raise ValueError('Must be positive.') + elif self._rowIndex + nRows >= len(self._rows): +- raise ValueError, 'Beyond the rows range.' ++ raise ValueError('Beyond the rows range.') + else: + return self._rowIndex + nRows + +- def next(self): ++ def __next__(self): + if self._rowIndex + 1 >= len(self._rows): + return False + else: +@@ -90,7 +90,7 @@ class ITuple(object): + + def setRow(self, rowIndex): + if rowIndex >= len(self._rows): +- raise ValueError, 'Beyond the rows range.' 
++ raise ValueError('Beyond the rows range.') + else: + self._rowIndex = rowIndex + +@@ -123,9 +123,9 @@ class ITuple(object): + + def getLong(self, columnIndex): + if self._rowIndex == -1: +- return long(self._rowBuffer[columnIndex]) ++ return int(self._rowBuffer[columnIndex]) + else: +- return long(self._rows[self._rowIndex][columnIndex]) ++ return int(self._rows[self._rowIndex][columnIndex]) + + def getChar(self, columnIndex): + if self._rowIndex == -1: +@@ -186,7 +186,7 @@ class ITuple(object): + columnConverter = self._columnConverters[columnIndex] + if columnConverter == int: + return PAIDA_Types.Integer +- elif columnConverter == long: ++ elif columnConverter == int: + return PAIDA_Types.Long + elif columnConverter == float: + return PAIDA_Types.Double +@@ -278,25 +278,25 @@ class ITuple(object): + evaluatDoubleX = evaluatorX.evaluateDouble + self.start() + if (filterObject == None) and (weightObject == None): +- while self.next(): ++ while next(self): + histogramFill(evaluatDoubleX()) + elif (filterObject != None) and (weightObject == None): + filterObject.initialize(self) + filterAccept = filterObject.accept +- while self.next(): ++ while next(self): + if filterAccept(): + histogramFill(evaluatDoubleX()) + elif (filterObject == None) and (weightObject != None): + weightObject.initialize(self) + weightDouble = weightObject.evaluateDouble +- while self.next(): ++ while next(self): + histogramFill(evaluatDoubleX(), weightDouble()) + else: + filterObject.initialize(self) + filterAccept = filterObject.accept + weightObject.initialize(self) + weightDouble = weightObject.evaluateDouble +- while self.next(): ++ while next(self): + if filterAccept(): + histogramFill(evaluatDoubleX(), weightDouble()) + +@@ -330,25 +330,25 @@ class ITuple(object): + evaluatDoubleY = evaluatorY.evaluateDouble + self.start() + if (filterObject == None) and (weightObject == None): +- while self.next(): ++ while next(self): + histogramFill(evaluatDoubleX(), evaluatDoubleY()) + elif 
(filterObject != None) and (weightObject == None): + filterObject.initialize(self) + filterAccept = filterObject.accept +- while self.next(): ++ while next(self): + if filterAccept(): + histogramFill(evaluatDoubleX(), evaluatDoubleY()) + elif (filterObject == None) and (weightObject != None): + weightObject.initialize(self) + weightDouble = weightObject.evaluateDouble +- while self.next(): ++ while next(self): + histogramFill(evaluatDoubleX(), evaluatDoubleY(), weightDouble()) + else: + filterObject.initialize(self) + filterAccept = filterObject.accept + weightObject.initialize(self) + weightDouble = weightObject.evaluateDouble +- while self.next(): ++ while next(self): + if filterAccept(): + histogramFill(evaluatDoubleX(), evaluatDoubleY(), weightDouble()) + +@@ -388,25 +388,25 @@ class ITuple(object): + evaluatDoubleZ = evaluatorZ.evaluateDouble + self.start() + if (filterObject == None) and (weightObject == None): +- while self.next(): ++ while next(self): + histogramFill(evaluatDoubleX(), evaluatDoubleY(), evaluatDoubleZ()) + elif (filterObject != None) and (weightObject == None): + filterObject.initialize(self) + filterAccept = filterObject.accept +- while self.next(): ++ while next(self): + if filterAccept(): + histogramFill(evaluatDoubleX(), evaluatDoubleY(), evaluatDoubleZ()) + elif (filterObject == None) and (weightObject != None): + weightObject.initialize(self) + weightDouble = weightObject.evaluateDouble +- while self.next(): ++ while next(self): + histogramFill(evaluatDoubleX(), evaluatDoubleY(), evaluatDoubleZ(), weightDouble()) + else: + filterObject.initialize(self) + filterAccept = filterObject.accept + weightObject.initialize(self) + weightDouble = weightObject.evaluateDouble +- while self.next(): ++ while next(self): + if filterAccept(): + histogramFill(evaluatDoubleX(), evaluatDoubleY(), evaluatDoubleZ(), weightDouble()) + +--- paida/paida_core/ITupleFactory.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/ITupleFactory.py +@@ -26,15 +26,15 
@@ class ITupleFactory(object): + columnTypes = data2 + columnString = self._createColumnString(columnTypes, columnNames) + optionString = '' +- elif hasattr(data1, '__iter__') and hasattr(data2, '__iter__') and (type(data3) in types.StringTypes): ++ elif hasattr(data1, '__iter__') and hasattr(data2, '__iter__') and (type(data3) in (str,)): + columnNames = data1 + columnTypes = data2 + columnString = self._createColumnString(columnTypes, columnNames) + optionString = data3 +- elif (type(data1) in types.StringTypes) and (data2 == None) and (data3 == None): ++ elif (type(data1) in (str,)) and (data2 == None) and (data3 == None): + columnString = data1 + optionString = '' +- elif (type(data1) in types.StringTypes) and (type(data2) in types.StringTypes) and (data3 == None): ++ elif (type(data1) in (str,)) and (type(data2) in (str,)) and (data3 == None): + columnString = data1 + optionString = data2 + +@@ -106,7 +106,7 @@ class ITupleFactory(object): + converter = int + columnName = parser.get_token() + elif token in ['long']: +- converter = long ++ converter = int + columnName = parser.get_token() + elif token in ['float', 'double']: + converter = float +@@ -172,7 +172,7 @@ class ITupleFactory(object): + columnDefault = None + breakFlag = True + else: +- raise RuntimeError, 'Unknown character "%s"' % token ++ raise RuntimeError('Unknown character "%s"' % token) + + columnNames.append(columnName) + columnDefaults.append(columnDefault) +@@ -185,7 +185,7 @@ class ITupleFactory(object): + def createChained(self, name, title, dataList): + if dataList == []: + raise IllegalArgumentException() +- elif isinstance(dataList[0], types.StringTypes): ++ elif isinstance(dataList[0], (str,)): + tupleObjects = [] + for item in dataList: + tupleObjects.append(self._tree.find(item)) +@@ -208,7 +208,7 @@ class ITupleFactory(object): + + def createFiltered(self, name, tupleObject, filterObject, columnNames = None): + if columnNames == None: +- columnIndices = range(tupleObject.columns()) 
++ columnIndices = list(range(tupleObject.columns())) + else: + columnIndices = [] + for columnName in columnNames: +@@ -269,9 +269,9 @@ class ITupleFactory(object): + def createFilter(self, expression, rowsToProcess = None, startingRow = None): + if (rowsToProcess == None) and (startingRow == None): + return IFilter(expression) +- elif isinstance(rowsToProcess, types.IntType) and (startingRow == None): ++ elif isinstance(rowsToProcess, int) and (startingRow == None): + return IFilter(expression, rowsToProcess) +- elif isinstance(rowsToProcess, types.IntType) and isinstance(startingRow, types.IntType): ++ elif isinstance(rowsToProcess, int) and isinstance(startingRow, int): + return IFilter(expression, rowsToProcess, startingRow) + else: + raise IllegalArgumentException() +--- paida/paida_core/PAbsorber.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/PAbsorber.py +@@ -1,3 +1,4 @@ ++from functools import reduce + ### enumerate + try: + _temp = enumerate +@@ -10,14 +11,6 @@ except NameError: + i += 1 + return result + +-### True, False +-try: +- _temp = True +- _temp = False +-except NameError: +- True = 1 +- False = 0 +- + ### bool + try: + _temp = bool +@@ -38,9 +31,9 @@ except NameError: + ### StringTypes + try: + import types +- _temp = types.StringTypes ++ _temp = (str,) + except AttributeError: +- types.StringTypes = (types.StringType, types.UnicodeType) ++ (str,) = (bytes, str) + + ### file() + try: +--- paida/paida_core/PExceptions.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_core/PExceptions.py +@@ -11,14 +11,14 @@ class BaseException: + message = self.getMessage() + if message == None: + message = '(No exception message)' +- print ' #########################' +- print ' # Exception Message #' +- print ' #########################' +- print ' ' + self.__class__.__name__ +- print ' =========================' +- print ' ' + message +- print ' #########################' +- print '' ++ print(' #########################') ++ print(' # Exception Message 
#') ++ print(' #########################') ++ print(' ' + self.__class__.__name__) ++ print(' =========================') ++ print(' ' + message) ++ print(' #########################') ++ print('') + + def getMessage(self): + return self.message +--- paida/paida_gui/PGuiSelector.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_gui/PGuiSelector.py +@@ -12,18 +12,18 @@ def setGuiEngineName(guiEngineName): + pass + elif guiEngineName == 'tkinter': + try: +- import Tkinter ++ import tkinter + except ImportError: +- print 'PAIDA: "tkinter" GUI engine is unavailable.' +- print 'PAIDA: "batch" GUI engine is selected.' ++ print('PAIDA: "tkinter" GUI engine is unavailable.') ++ print('PAIDA: "batch" GUI engine is selected.') + guiEngineName = 'batch' + elif guiEngineName == 'swing': + pass + elif guiEngineName == 'batch': + pass + else: +- print 'PAIDA: "%s" GUI engine was not found.' % guiEngineName +- print 'PAIDA: "batch" GUI engine is selected.' ++ print('PAIDA: "%s" GUI engine was not found.' % guiEngineName) ++ print('PAIDA: "batch" GUI engine is selected.') + guiEngineName = 'batch' + global _guiEngineName + _guiEngineName = guiEngineName +--- paida/paida_gui/swing/PRoot.py.orig 2022-03-18 21:34:04 UTC ++++ paida/paida_gui/swing/PRoot.py +@@ -871,7 +871,7 @@ class _Plotter(_Base): + self._edt_create_styledMarker(style, tags, styleX + fontWidth / 2.0, lineY + fontHalf) + style.setParameter('size', originalSize) + else: +- raise RuntimeError, 'Unknown style in legends.' ++ raise RuntimeError('Unknown style in legends.') + self._edt_create_styledText(textStyle, tags, descriptionX, lineY, description, NW) + lineY += fontHeight + spacerY + +@@ -1023,7 +1023,7 @@ class _Plotter(_Base): + elif lineType == 'dash-dot-dot': + return BasicStroke(lineStyle.thickness(), BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 2.0, [6.0, 2.0, 4.0, 2.0, 4.0, 2.0], 0.0) + else: +- raise RuntimeError, 'Unknown line type "%s".' % lineType ++ raise RuntimeError('Unknown line type "%s".' 
% lineType) + + def _getSwingColor(self, color): + if color == '': +@@ -1047,7 +1047,7 @@ class _Plotter(_Base): + g = int(color[5:7], 16) + b = int(color[9:11], 16) + else: +- raise RuntimeError, 'Unknown color name "%s".' % color ++ raise RuntimeError('Unknown color name "%s".' % color) + return Color(r, g, b) + elif color == 'white': + return Color.white +@@ -1066,7 +1066,7 @@ class _Plotter(_Base): + elif color == 'yellow': + return Color.yellow + else: +- raise RuntimeError, 'Unknown color name "%s".' % color ++ raise RuntimeError('Unknown color name "%s".' % color) + + def _edt_delete(self, tags): + panel = self._getViewComponent() +@@ -1226,7 +1226,7 @@ class _Plotter(_Base): + x0 = x + y0 = y + metrics.getAscent() + else: +- raise RuntimeError, 'Unknown anchor "%s".' % anchor ++ raise RuntimeError('Unknown anchor "%s".' % anchor) + + graphics = self._getGraphics() + graphics.setFont(swingFont) +--- paida/tools/TupleString.py.orig 2022-03-18 21:34:04 UTC ++++ paida/tools/TupleString.py +@@ -63,9 +63,9 @@ class TupleItem: + return tupleString + + def createTuple(): +- print 'Warning:' +- print 'createTuple() will be removed' +- print 'use create() alternatively' ++ print('Warning:') ++ print('createTuple() will be removed') ++ print('use create() alternatively') + return create() + + def create(): diff --git a/science/py-pygeometa/Makefile b/science/py-pygeometa/Makefile index fd867ea6cbe..382c4eba7f0 100644 --- a/science/py-pygeometa/Makefile +++ b/science/py-pygeometa/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= pygeometa -PORTVERSION= 0.9.0 +PORTVERSION= 0.9.1 CATEGORIES= science python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/science/py-pygeometa/distinfo b/science/py-pygeometa/distinfo index 0cbf7ae6622..20f4944581f 100644 --- a/science/py-pygeometa/distinfo +++ b/science/py-pygeometa/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058110 -SHA256 (pygeometa-0.9.0.tar.gz) = 
2713d9c78598f3272f8b71a3ade9add41fb95641d0d15739cafbecfc9c607cdb -SIZE (pygeometa-0.9.0.tar.gz) = 45191 +TIMESTAMP = 1647264662 +SHA256 (pygeometa-0.9.1.tar.gz) = 0f599c02f1f49630c8a4529f47916ce62e78b6d2d07c4d1e3b097c786a19cd7e +SIZE (pygeometa-0.9.1.tar.gz) = 45197 diff --git a/science/py-pymol/Makefile b/science/py-pymol/Makefile index 0dd69a83fce..de97b468cef 100644 --- a/science/py-pymol/Makefile +++ b/science/py-pymol/Makefile @@ -3,7 +3,7 @@ PORTNAME= pymol DISTVERSIONPREFIX= v DISTVERSION= 2.4.0 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= science biology python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/science/py-pyteomics/Makefile b/science/py-pyteomics/Makefile index b484a2c4d71..d70a933d999 100644 --- a/science/py-pyteomics/Makefile +++ b/science/py-pyteomics/Makefile @@ -1,5 +1,5 @@ PORTNAME= pyteomics -PORTVERSION= 4.5.2 +PORTVERSION= 4.5.3 CATEGORIES= science python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/science/py-pyteomics/distinfo b/science/py-pyteomics/distinfo index ae98c8c97f9..ac9ead27111 100644 --- a/science/py-pyteomics/distinfo +++ b/science/py-pyteomics/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971124 -SHA256 (pyteomics-4.5.2.tar.gz) = 111bac1d553a42f3d704fa479963593e57986da729e4b57332374e32488d2ede -SIZE (pyteomics-4.5.2.tar.gz) = 199978 +TIMESTAMP = 1647264664 +SHA256 (pyteomics-4.5.3.tar.gz) = 066528ef9a0b2a0eb2f47beb3e2a7e920f91f464e7397b058a5fa2a8482d175b +SIZE (pyteomics-4.5.3.tar.gz) = 202088 diff --git a/science/py-qspin/files/patch-2to3 b/science/py-qspin/files/patch-2to3 new file mode 100644 index 00000000000..6114807db1b --- /dev/null +++ b/science/py-qspin/files/patch-2to3 @@ -0,0 +1,113 @@ +--- qspin/jones.py.orig 2016-08-28 06:00:41 UTC ++++ qspin/jones.py +@@ -71,61 +71,61 @@ def vsth(): + szc = 0.5*np.matrix([[1,-i],[i,1]]) - 0.5*np.matrix([[1,i],[-i,1]]) + + def photon(): +- print '----------------' +- print '|H>' ++ print('----------------') ++ print('|H>') + J,a,rho = 
jones(0,0,'blue') +- print 'J = ',J.T +- print 'a = ',a +- print 'rho = ' +- print rho +- print 'spins' +- print np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho) +- print '----------------' +- print '|V>' ++ print('J = ',J.T) ++ print('a = ',a) ++ print('rho = ') ++ print(rho) ++ print('spins') ++ print(np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho)) ++ print('----------------') ++ print('|V>') + J,a,rho = jones(pi,0,'blue') +- print 'J = ',J.T +- print 'a = ',a +- print 'rho = ' +- print rho +- print 'spins' +- print np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho) +- print '----------------' +- print '|D> = |H>+|V>' ++ print('J = ',J.T) ++ print('a = ',a) ++ print('rho = ') ++ print(rho) ++ print('spins') ++ print(np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho)) ++ print('----------------') ++ print('|D> = |H>+|V>') + J,a,rho = jones(pi/2,0,'green') +- print 'J = ',J.T +- print 'a = ',a +- print 'rho = ' +- print rho +- print 'spins' +- print np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho) +- print '----------------' +- print '|A> = |H>-|V>' ++ print('J = ',J.T) ++ print('a = ',a) ++ print('rho = ') ++ print(rho) ++ print('spins') ++ print(np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho)) ++ print('----------------') ++ print('|A> = |H>-|V>') + J,a,rho = jones(pi/2,pi,'green') +- print 'J = ',J.T +- print 'a = ',a +- print 'rho = ' +- print rho +- print 'spins' +- print np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho) +- print '----------------' +- print '|L> = |H>+i|V>' ++ print('J = ',J.T) ++ print('a = ',a) ++ print('rho = ') ++ print(rho) ++ print('spins') ++ print(np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho)) ++ print('----------------') ++ print('|L> = |H>+i|V>') + J,a,rho = jones(pi/2,pi/2,'red') +- print 'J = ',J.T +- print 'a = ',a +- print 'rho = ' +- print rho +- print 'spins' +- print np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho) +- print '----------------' +- print '|R> = |H>-i|V>' ++ print('J = ',J.T) ++ print('a = ',a) 
++ print('rho = ') ++ print(rho) ++ print('spins') ++ print(np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho)) ++ print('----------------') ++ print('|R> = |H>-i|V>') + J,a,rho = jones(pi/2,-pi/2,'red') +- print 'J = ',J.T +- print 'a = ',a +- print 'rho = ' +- print rho +- print 'spins' +- print np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho) +- print '----------------' ++ print('J = ',J.T) ++ print('a = ',a) ++ print('rho = ') ++ print(rho) ++ print('spins') ++ print(np.trace(sx*rho),np.trace(sy*rho),np.trace(sz*rho)) ++ print('----------------') + + V = np.matrix([1,0]).T + H = np.matrix([0,1]).T diff --git a/science/py-segyio/Makefile b/science/py-segyio/Makefile index 7c16fa42e7c..22ff57c30ff 100644 --- a/science/py-segyio/Makefile +++ b/science/py-segyio/Makefile @@ -28,6 +28,8 @@ WRKSRC_SUBDIR= python post-install: @${STRIP_CMD} ${STAGEDIR}${PYTHONPREFIX_SITELIBDIR}/segyio/_segyio*.so + ${PYTHON_CMD} -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} + ${PYTHON_CMD} -O -m compileall -d ${PYTHON_SITELIBDIR} ${STAGEDIR}${PYTHON_SITELIBDIR} do-test: # tests fail: https://github.com/equinor/segyio/issues/511 @cd ${WRKSRC} && \ diff --git a/science/py-segyio/files/patch-2to3 b/science/py-segyio/files/patch-2to3 new file mode 100644 index 00000000000..5e35d56f2dc --- /dev/null +++ b/science/py-segyio/files/patch-2to3 @@ -0,0 +1,302 @@ +--- segyio/__init__.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/__init__.py +@@ -51,7 +51,7 @@ class Enum(object): + return int(self._value) + + def __str__(self): +- for k, v in self.__class__.__dict__.items(): ++ for k, v in list(self.__class__.__dict__.items()): + if isinstance(v, int) and self._value == v: + return k + return "Unknown Enum" +@@ -76,7 +76,7 @@ class Enum(object): + @classmethod + def enums(cls): + result = [] +- for v in cls.__dict__.values(): ++ for v in list(cls.__dict__.values()): + if isinstance(v, int): + result.append(cls(v)) + +--- segyio/create.py.orig 2022-02-18 06:49:44 UTC ++++ 
segyio/create.py +@@ -200,7 +200,7 @@ def create(filename, spec): + + if endian not in endians: + problem = 'unknown endianness {}, expected one of: ' +- opts = ' '.join(endians.keys()) ++ opts = ' '.join(list(endians.keys())) + raise ValueError(problem.format(endian) + opts) + + fd = _segyio.segyiofd(str(filename), 'w+', endians[endian]) +--- segyio/depth.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/depth.py +@@ -172,7 +172,7 @@ class Depth(Sequence): + >>> depth[::2] = other + """ + if isinstance(depth, slice): +- for i, x in zip(range(*depth.indices(len(self))), val): ++ for i, x in zip(list(range(*depth.indices(len(self)))), val): + self[i] = x + return + +--- segyio/field.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/field.py +@@ -456,7 +456,7 @@ class Field(MutableMapping): + return False + + def intkeys(d): +- return { int(k): v for k, v in d.items() } ++ return { int(k): v for k, v in list(d.items()) } + + return intkeys(self) == intkeys(other) + +@@ -513,13 +513,13 @@ class Field(MutableMapping): + for key in other: + self.putfield(buf, int(key), other[key]) + elif hasattr(other, "keys"): +- for key in other.keys(): ++ for key in list(other.keys()): + self.putfield(buf, int(key), other[key]) + else: + for key, value in other: + self.putfield(buf, int(key), value) + +- for key, value in kwargs.items(): ++ for key, value in list(kwargs.items()): + self.putfield(buf, int(self._kwargs[key]), value) + + self.buf = buf +@@ -543,4 +543,4 @@ class Field(MutableMapping): + ).reload() + + def __repr__(self): +- return repr(self[self.keys()]) ++ return repr(self[list(self.keys())]) +--- segyio/gather.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/gather.py +@@ -119,7 +119,7 @@ class Gather(object): + offs = slice(off, off + 1, 1) + + xs = list(filter(self.offsets.__contains__, +- range(*offs.indices(self.offsets[-1]+1)))) ++ list(range(*offs.indices(self.offsets[-1]+1))))) + + empty = np.empty(0, dtype = self.trace.dtype) + # gather[int,int,:] +@@ -138,8 +138,8 @@ class 
Gather(object): + # buffered, and traces can be read from the iline. This is the + # least efficient when there are very few traces read per inline, + # but huge savings with larger subcubes +- last_il = self.iline.keys()[-1] + 1 +- last_xl = self.xline.keys()[-1] + 1 ++ last_il = list(self.iline.keys())[-1] + 1 ++ last_xl = list(self.xline.keys())[-1] + 1 + + il_slice = il if isslice(il) else slice(il, il+1) + xl_slice = xl if isslice(xl) else slice(xl, xl+1) +@@ -149,15 +149,15 @@ class Gather(object): + # but it's unnecessary to chck all keys up until the first xline + # because that will never be a hit anyway + if il_slice.start is None: +- start = self.iline.keys()[0] ++ start = list(self.iline.keys())[0] + il_slice = slice(start, il_slice.stop, il_slice.step) + + if xl_slice.start is None: +- start = self.xline.keys()[0] ++ start = list(self.xline.keys())[0] + xl_slice = slice(start, xl_slice.stop, xl_slice.step) + +- il_range = range(*il_slice.indices(last_il)) +- xl_range = range(*xl_slice.indices(last_xl)) ++ il_range = list(range(*il_slice.indices(last_il))) ++ xl_range = list(range(*xl_slice.indices(last_xl))) + + # the try-except-else is essentially a filter on in/xl keys, but + # delegates the work (and decision) to the iline and xline modes +@@ -372,7 +372,7 @@ class Groups(Mapping): + pass + + try: +- items = key.items() ++ items = list(key.items()) + except AttributeError: + items = iter(key) + +@@ -420,15 +420,15 @@ class Groups(Mapping): + return Group(key, self, self.bins[key]) + + def values(self): +- for key, index in self.bins.items(): ++ for key, index in list(self.bins.items()): + yield Group(key, self, index) + + def items(self): +- for key, index in self.bins.items(): ++ for key, index in list(self.bins.items()): + yield key, Group(key, self, index) + + def __iter__(self): +- return self.bins.keys() ++ return list(self.bins.keys()) + + def sort(self, fields): + """ +@@ -436,7 +436,7 @@ class Groups(Mapping): + """ + bins = 
collections.OrderedDict() + +- for key, index in self.bins.items(): ++ for key, index in list(self.bins.items()): + g = Group(key, self, index) + g.sort(fields) + bins[key] = g.index +--- segyio/line.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/line.py +@@ -91,12 +91,12 @@ class Line(Mapping): + if not isinstance(offset, slice): + offset = slice(offset, offset + 1) + +- index = sanitize_slice(index, self.heads.keys()) +- offset = sanitize_slice(offset, self.offsets.keys()) +- irange = range(*index.indices(max(self.heads.keys()) + 1)) +- orange = range(*offset.indices(max(self.offsets.keys()) + 1)) +- irange = filter(self.heads.__contains__, irange) +- orange = filter(self.offsets.__contains__, orange) ++ index = sanitize_slice(index, list(self.heads.keys())) ++ offset = sanitize_slice(offset, list(self.offsets.keys())) ++ irange = list(range(*index.indices(max(self.heads.keys()) + 1))) ++ orange = list(range(*offset.indices(max(self.offsets.keys()) + 1))) ++ irange = list(filter(self.heads.__contains__, irange)) ++ orange = list(filter(self.offsets.__contains__, orange)) + # offset-range is used in inner loops, so make it a list for + # reusability. offsets are usually few, so no real punishment by using + # non-generators here +@@ -344,7 +344,7 @@ class Line(Mapping): + + def items(self): + """D.values() -> generator of D's (key,values), as 2-tuples""" +- return zip(self.keys(), self[:]) ++ return zip(list(self.keys()), self[:]) + + class HeaderLine(Line): + """ +--- segyio/open.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/open.py +@@ -155,7 +155,7 @@ def open(filename, mode="r", iline = 189, + + if endian not in endians: + problem = 'unknown endianness {}, expected one of: ' +- opts = ' '.join(endians.keys()) ++ opts = ' '.join(list(endians.keys())) + raise ValueError(problem.format(endian) + opts) + + from . 
import _segyio +--- segyio/segy.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/segy.py +@@ -910,7 +910,7 @@ class SegyFile(object): + + if sorting not in valid_sortings: + error = "Invalid sorting" +- solution = "valid sorting options are: {}".format(valid_sortings.keys()) ++ solution = "valid sorting options are: {}".format(list(valid_sortings.keys())) + raise ValueError('{}, {}'.format(error, solution)) + + if offsets is None: +--- segyio/su/file.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/su/file.py +@@ -89,7 +89,7 @@ def open(filename, mode = 'r', iline = 189, + + if endian not in endians: + problem = 'unknown endianness, must be one of: ' +- candidates = ' '.join(endians.keys()) ++ candidates = ' '.join(list(endians.keys())) + raise ValueError(problem + candidates) + + from .. import _segyio +--- segyio/tools.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/tools.py +@@ -289,7 +289,7 @@ def rotation(f, line = 'fast'): + + if line not in lines: + error = "Unknown line {}".format(line) +- solution = "Must be any of: {}".format(' '.join(lines.keys())) ++ solution = "Must be any of: {}".format(' '.join(list(lines.keys()))) + raise ValueError('{} {}'.format(error, solution)) + + l = lines[line] +@@ -299,7 +299,7 @@ def rotation(f, line = 'fast'): + rot = f.xfd.rotation( len(l), + l.stride, + len(f.offsets), +- np.fromiter(l.keys(), dtype = np.intc) ) ++ np.fromiter(list(l.keys()), dtype = np.intc) ) + return rot, cdpx, cdpy + + def metadata(f): +@@ -466,7 +466,7 @@ def from_array(filename, data, iline=189, + data = np.asarray(data) + dimensions = len(data.shape) + +- if dimensions not in range(2, 5): ++ if dimensions not in list(range(2, 5)): + problem = "Expected 2, 3, or 4 dimensions, {} was given".format(dimensions) + raise ValueError(problem) + +--- segyio/trace.py.orig 2022-02-18 06:49:44 UTC ++++ segyio/trace.py +@@ -198,7 +198,7 @@ class Trace(Sequence): + step = 1 + single = True + +- n_elements = len(range(start, stop, step)) ++ n_elements = len(list(range(start, 
stop, step))) + + try: + i = self.wrapindex(i) +@@ -278,7 +278,7 @@ class Trace(Sequence): + + """ + if isinstance(i, slice): +- for j, x in zip(range(*i.indices(len(self))), val): ++ for j, x in zip(list(range(*i.indices(len(self)))), val): + self[j] = x + + return +@@ -388,7 +388,7 @@ class RawTrace(Trace): + msg = 'trace indices must be integers or slices, not {}' + raise TypeError(msg.format(type(i).__name__)) + start, _, step = indices +- length = len(range(*indices)) ++ length = len(list(range(*indices))) + buf = np.empty((length, self.shape), dtype = self.dtype) + return self.filehandle.gettr(buf, start, step, length, 0, self.shape, 1, self.shape) + +@@ -435,7 +435,7 @@ class RefTrace(Trace): + be useful in certain contexts to provide stronger guarantees. + """ + garbage = [] +- for i, (x, signature) in self.refs.items(): ++ for i, (x, signature) in list(self.refs.items()): + if sys.getrefcount(x) == 3: + garbage.append(i) + +@@ -841,7 +841,7 @@ class Attributes(Sequence): + field = self.field + + start, stop, step = i.indices(traces) +- indices = range(start, stop, step) ++ indices = list(range(start, stop, step)) + attrs = np.empty(len(indices), dtype = self.dtype) + return filehandle.field_forall(attrs, start, stop, step, field) + +@@ -955,7 +955,7 @@ class Text(Sequence): + msg = 'trace indices must be integers or slices, not {}' + raise TypeError(msg.format(type(i).__name__)) + +- for i, text in zip(range(*indices), val): ++ for i, text in zip(list(range(*indices)), val): + if isinstance(text, Text): + text = text[0] + self.filehandle.puttext(i, text) diff --git a/science/qbox/Makefile b/science/qbox/Makefile index 5350e3090d6..9cabd7f3d7b 100644 --- a/science/qbox/Makefile +++ b/science/qbox/Makefile @@ -1,6 +1,6 @@ PORTNAME= qbox DISTVERSIONPREFIX= rel -DISTVERSION= 1_74_1 +DISTVERSION= 1_74_2 CATEGORIES= science MAINTAINER= yuri@FreeBSD.org diff --git a/science/qbox/distinfo b/science/qbox/distinfo index 655bd82168c..7b09f529ca8 100644 --- 
a/science/qbox/distinfo +++ b/science/qbox/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1642177105 -SHA256 (qboxcode-qbox-public-rel1_74_1_GH0.tar.gz) = b53c05d6b0b433f8d307f00c7a6ecb44dc028677c518eaf2049a1357a87e2b91 -SIZE (qboxcode-qbox-public-rel1_74_1_GH0.tar.gz) = 711210 +TIMESTAMP = 1648348202 +SHA256 (qboxcode-qbox-public-rel1_74_2_GH0.tar.gz) = c6808fd6e55f014ed79d34bfb1356c216ea72a96bdbc1813e370c410aaafdfd4 +SIZE (qboxcode-qbox-public-rel1_74_2_GH0.tar.gz) = 711648 diff --git a/science/qmcpack/Makefile b/science/qmcpack/Makefile index da42810a5c1..c12c4f8aa70 100644 --- a/science/qmcpack/Makefile +++ b/science/qmcpack/Makefile @@ -1,7 +1,7 @@ PORTNAME= qmcpack DISTVERSIONPREFIX= v DISTVERSION= 3.13.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= science MAINTAINER= yuri@FreeBSD.org diff --git a/science/qwalk/Makefile b/science/qwalk/Makefile index 0536c33ea22..1be5cd6e3d6 100644 --- a/science/qwalk/Makefile +++ b/science/qwalk/Makefile @@ -1,7 +1,7 @@ PORTNAME= qwalk DISTVERSIONPREFIX= v DISTVERSION= 1.0.1-300 -PORTREVISION= 3 +PORTREVISION= 4 DISTVERSIONSUFFIX= -g1b7e381 CATEGORIES= science diff --git a/security/Makefile b/security/Makefile index 2a1e13aa239..8a8872adcb3 100644 --- a/security/Makefile +++ b/security/Makefile @@ -1017,6 +1017,7 @@ SUBDIR += py-taxii2-client SUBDIR += py-tls-parser SUBDIR += py-tlslite + SUBDIR += py-tlslite-ng SUBDIR += py-trezor SUBDIR += py-trustme SUBDIR += py-tuf diff --git a/security/beid/Makefile b/security/beid/Makefile index f1937afc620..d03ea144e8f 100644 --- a/security/beid/Makefile +++ b/security/beid/Makefile @@ -1,5 +1,6 @@ PORTNAME= beid PORTVERSION= 5.0.28 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= security diff --git a/security/belier/files/patch-2to3 b/security/belier/files/patch-2to3 new file mode 100644 index 00000000000..6b9777cc879 --- /dev/null +++ b/security/belier/files/patch-2to3 @@ -0,0 +1,141 @@ +--- belier/options.py.orig 2009-11-01 20:35:14 UTC ++++ belier/options.py +@@ -51,10 +51,10 @@ class 
Options: + if self._options[0].nomfichier is not None: + fichierentree = abspath(expanduser(self._options[0].nomfichier)) + if not exists(fichierentree): +- print _("%s : no such file") % fichierentree ++ print(_("%s : no such file") % fichierentree) + sys.exit(1) + elif not isfile(fichierentree): +- print _("%s is not a file") % fichierentree ++ print(_("%s is not a file") % fichierentree) + sys.exit(1) + else: + self._options[0].nomfichier = fichierentree +@@ -62,19 +62,19 @@ class Options: + if self._options[0].repsortie is not None: + repertoiresortie = abspath(expanduser(self._options[0].repsortie)) + if not exists(repertoiresortie): +- print _("%s : no such directory") % repertoiresortie ++ print(_("%s : no such directory") % repertoiresortie) + sys.exit(1) + elif not isdir(repertoiresortie): +- print _("%s is not a directory") % repertoiresortie ++ print(_("%s is not a directory") % repertoiresortie) + sys.exit(1) + else: + self._options[0].repsortie = repertoiresortie + + if self._options[0].delai is not None: + if self._options[0].delai < -1 or \ +- self._options[0].delai > sys.maxint: +- print _("The given value is not valid%sThe delay value \ +-must be >= -1 and <= value of an integer on your system" % linesep) ++ self._options[0].delai > sys.maxsize: ++ print(_("The given value is not valid%sThe delay value \ ++must be >= -1 and <= value of an integer on your system" % linesep)) + sys.exit(1) + + def lesoptions(self): +--- belier/optionstunnel.py.orig 2009-11-01 20:35:14 UTC ++++ belier/optionstunnel.py +@@ -30,15 +30,15 @@ class OptionsTunnel: + def parse_ligne(self, ligne): + """Parse la ligne qui indique les numéros de ports""" + if len(ligne.split()) != 2: +- print _('You should have two tunnel options arguments \ +-(source port and destination port)') ++ print(_('You should have two tunnel options arguments \ ++(source port and destination port)')) + sys.exit(1) + for port in ligne.split(): + if not port.isdigit(): +- print _('A port number should 
only contain digits') ++ print(_('A port number should only contain digits')) + sys.exit(1) + if int(port) > 65535: +- print _('A port number can not exceed 65535') ++ print(_('A port number can not exceed 65535')) + sys.exit(1) + self._source, self._destination = ligne.split() + +--- belier/terminal.py.orig 2009-11-01 20:35:14 UTC ++++ belier/terminal.py +@@ -21,7 +21,7 @@ import stat + from os import linesep, chmod, sep + from os.path import expanduser, abspath, join + +-from optionstunnel import OptionsTunnel ++from .optionstunnel import OptionsTunnel + + SSHOPTS = '-o NoHostAuthenticationForLocalhost=yes -o StrictHostKeyChecking=no' + +@@ -60,41 +60,41 @@ class Terminal: + self._ordres = open(expanduser(nomfichier), 'r').readlines() + else: + self._ordres = sys.stdin.readlines() +- except IOError, message: +- print message ++ except IOError as message: ++ print(message) + sys.exit(1) + except KeyboardInterrupt: +- print _("Belier has been stopped manually by the user") ++ print(_("Belier has been stopped manually by the user")) + sys.exit(1) + # deux passes pour étudier les ordres +- for boucle in xrange(2): +- for num in xrange(len(self._ordres)): ++ for boucle in range(2): ++ for num in range(len(self._ordres)): + # 1ère passe : on écarte les erreurs banales + if boucle == 0 and self._ordres[num] != linesep: + if '\0' in self._ordres[num]: +- print _("The file format is invalid \ +-It may be a binary file ?") ++ print(_("The file format is invalid \ ++It may be a binary file ?")) + sys.exit(1) + self._ordres[num] = self.remplace_guillemets_motdepasse( + self._ordres[num]) + if len(self._ordres[num].split(' ')) > 5: +- print _("Incorrect argument number \ +-on the order file line") ++ print(_("Incorrect argument number \ ++on the order file line")) + sys.exit(1) + identifiant = self._ordres[num].split(' ')[0] + if len(identifiant) <= 2 and identifiant != linesep: +- print _("A hostname must contain at \ +-least two characters (rfc952)") ++ print(_("A hostname 
must contain at \ ++least two characters (rfc952)")) + sys.exit(1) + ipoudns = identifiant.split('@')[-1] + if len(ipoudns) > 255: +- print _('Your domain name size \ +-exceeds 255 characters') ++ print(_('Your domain name size \ ++exceeds 255 characters')) + sys.exit(1) + for hostname in ipoudns.split('.'): + if len(hostname) > 64: +- print _("Your hostname size \ +-exceeds 64 characters") ++ print(_("Your hostname size \ ++exceeds 64 characters")) + sys.exit(1) + if self._ordres[num].split()[-1] == '-c'+ linesep or \ + self._ordres[num].split()[-1] == '-c': +@@ -175,8 +175,8 @@ exceeds 64 characters") + try: + open(resultat, 'w').writelines(self._script) + chmod(resultat, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR ) +- except IOError, message: +- print message ++ except IOError as message: ++ print(message) + sys.exit(1) + # compteur à zéro pour le prochain bloc d'ordres + self._script = [self._entetel1, self._entetel2] diff --git a/security/boringssl/Makefile b/security/boringssl/Makefile index dd215b62acf..64aa9e59a7b 100644 --- a/security/boringssl/Makefile +++ b/security/boringssl/Makefile @@ -1,5 +1,5 @@ PORTNAME= boringssl -PORTVERSION= 0.0.0.0.2022.03.21.01 +PORTVERSION= 0.0.0.0.2022.03.25.01 CATEGORIES= security MAINTAINER= osa@FreeBSD.org @@ -18,7 +18,7 @@ CPE_VENDOR= google USE_GITHUB= yes GH_ACCOUNT= google -GH_TAGNAME= 8bbefbf +GH_TAGNAME= c9a7dd6 GH_TUPLE= golang:crypto:c07d793c2f9a:golang_crypto/vendor/golang.org/x/crypto \ golang:net:04defd469f4e:golang_net/vendor/golang.org/x/net \ golang:sys:04245dca01da:golang_sys/vendor/golang.org/x/sys \ diff --git a/security/boringssl/distinfo b/security/boringssl/distinfo index 6c69357fad0..0109aaa6fad 100644 --- a/security/boringssl/distinfo +++ b/security/boringssl/distinfo @@ -1,6 +1,6 @@ -TIMESTAMP = 1647903819 -SHA256 (google-boringssl-0.0.0.0.2022.03.21.01-8bbefbf_GH0.tar.gz) = 21ea155e956355641dcc9387afe265f3ceaa329deef691f257fc7c68b5f114c1 -SIZE 
(google-boringssl-0.0.0.0.2022.03.21.01-8bbefbf_GH0.tar.gz) = 30869701 +TIMESTAMP = 1648332258 +SHA256 (google-boringssl-0.0.0.0.2022.03.25.01-c9a7dd6_GH0.tar.gz) = 21005b87f643780b9313e606328eb6ea212ab3b9951b0acd1889b3e29ac2944a +SIZE (google-boringssl-0.0.0.0.2022.03.25.01-c9a7dd6_GH0.tar.gz) = 30882269 SHA256 (golang-crypto-c07d793c2f9a_GH0.tar.gz) = bd1e0856c43287c6ebd20d59e2b4f2ffa91c5cc275dbb02bb2cfc822e6dc18a4 SIZE (golang-crypto-c07d793c2f9a_GH0.tar.gz) = 1732023 SHA256 (golang-net-04defd469f4e_GH0.tar.gz) = 6009a6f0989341df975799276ff1c49af650d4a4ed3fd6db3501b91eb601810d diff --git a/security/bzrtp/Makefile b/security/bzrtp/Makefile index f94c9a7c3eb..5a4592e6d8a 100644 --- a/security/bzrtp/Makefile +++ b/security/bzrtp/Makefile @@ -1,5 +1,6 @@ PORTNAME= bzrtp PORTVERSION= 1.0.6 +PORTREVISION= 1 CATEGORIES= security MASTER_SITES= https://www.linphone.org/releases/sources/bzrtp/ diff --git a/security/clamav-lts/Makefile b/security/clamav-lts/Makefile index 236488311aa..40e8788d966 100644 --- a/security/clamav-lts/Makefile +++ b/security/clamav-lts/Makefile @@ -1,5 +1,6 @@ PORTNAME= clamav PORTVERSION= 0.103.5 +PORTREVISION= 1 PORTEPOCH= 1 CATEGORIES= security MASTER_SITES= https://www.clamav.net/downloads/production/ diff --git a/security/gnome-keyring/Makefile b/security/gnome-keyring/Makefile index b6be7d27e5b..49ae6d76f7a 100644 --- a/security/gnome-keyring/Makefile +++ b/security/gnome-keyring/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-keyring PORTVERSION= 40.0 +PORTREVISION= 1 CATEGORIES= security gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/security/gnomint/Makefile b/security/gnomint/Makefile index b8d5aca1d20..d604cb12153 100644 --- a/security/gnomint/Makefile +++ b/security/gnomint/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnomint PORTVERSION= 1.2.1 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= security MASTER_SITES= SF diff --git a/security/gonepass/Makefile b/security/gonepass/Makefile 
index 1b065f33b22..8a19694603b 100644 --- a/security/gonepass/Makefile +++ b/security/gonepass/Makefile @@ -1,6 +1,6 @@ PORTNAME= gonepass DISTVERSION= g20181221 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= security MAINTAINER= greg@unrelenting.technology diff --git a/security/greenbone-security-assistant/Makefile b/security/greenbone-security-assistant/Makefile index dfe5c764529..ae9206498c5 100644 --- a/security/greenbone-security-assistant/Makefile +++ b/security/greenbone-security-assistant/Makefile @@ -1,7 +1,7 @@ PORTNAME= greenbone-security-assistant DISTVERSIONPREFIX= v DISTVERSION= 21.4.3 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= security MASTER_SITES= LOCAL/acm/gsa/:yarn_cache DISTFILES= ${PORTNAME}-cache${EXTRACT_SUFX}:yarn_cache diff --git a/security/gstreamer1-plugins-dtls/Makefile b/security/gstreamer1-plugins-dtls/Makefile index 6d1568887cf..0717c34ff9e 100644 --- a/security/gstreamer1-plugins-dtls/Makefile +++ b/security/gstreamer1-plugins-dtls/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= security COMMENT= GStreamer Datagram TLS (dtls) plugin diff --git a/security/gvm-libs/Makefile b/security/gvm-libs/Makefile index ce71ec7c29e..d960a5d9f99 100644 --- a/security/gvm-libs/Makefile +++ b/security/gvm-libs/Makefile @@ -1,5 +1,6 @@ PORTNAME= gvm DISTVERSION= 21.4.3 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= security PKGNAMESUFFIX= -libs diff --git a/security/gvmd/Makefile b/security/gvmd/Makefile index 6257c5cd75c..826f52bd670 100644 --- a/security/gvmd/Makefile +++ b/security/gvmd/Makefile @@ -1,5 +1,6 @@ PORTNAME= gvmd DISTVERSION= 21.4.4 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= security diff --git a/security/hashcat/Makefile b/security/hashcat/Makefile index 7fc1e358d8e..d545635c385 100644 --- a/security/hashcat/Makefile +++ b/security/hashcat/Makefile @@ -11,9 +11,8 @@ COMMENT= Advanced CPU-based password recovery utility LICENSE= MIT LICENSE_FILE= ${WRKSRC}/docs/license.txt -NOT_FOR_ARCHS= aarch64 
powerpc powerpc64 powerpcspe sparc64 +NOT_FOR_ARCHS= powerpc powerpc64 powerpcspe sparc64 NOT_FOR_ARCHS_REASON= fails to compile: compiling for big-endian architecture not supported -NOT_FOR_ARCHS_REASON_aarch64= fails to compile BUILD_DEPENDS= minizip:archivers/minizip @@ -40,6 +39,8 @@ BRAIN_DESC= Build Hashcat Brain BRAIN_MAKE_ARGS_OFF= ENABLE_BRAIN=0 BRAIN_CFLAGS= -DWITH_BRAIN -Ideps/git/xxHash +CFLAGS_aarch64+= -march=armv8-a+crc+crypto + pre-install: ${STRIP_CMD} ${WRKSRC}/libhashcat.so.${PORTVERSION} ${STRIP_CMD} ${WRKSRC}/modules/*.so diff --git a/security/hashcat/files/patch-deps_LZMA-SDK_C_CpuArch.c b/security/hashcat/files/patch-deps_LZMA-SDK_C_CpuArch.c new file mode 100644 index 00000000000..a788aaec3e8 --- /dev/null +++ b/security/hashcat/files/patch-deps_LZMA-SDK_C_CpuArch.c @@ -0,0 +1,34 @@ +--- deps/LZMA-SDK/C/CpuArch.c.orig 2022-03-25 08:13:08 UTC ++++ deps/LZMA-SDK/C/CpuArch.c +@@ -384,6 +384,23 @@ BoolInt CPU_IsSupported_AES (void) { return APPLE_CRYP + + #include + ++#if defined(__FreeBSD__) ++static UInt64 get_hwcap() { ++ unsigned long hwcap; ++ if(elf_aux_info(AT_HWCAP, &hwcap, sizeof(unsigned long)) != 0) { ++ return(0); ++ } ++ return hwcap; ++} ++ ++BoolInt CPU_IsSupported_CRC32(void) { return get_hwcap() & HWCAP_CRC32; } ++BoolInt CPU_IsSupported_NEON(void) { return 1; } ++BoolInt CPU_IsSupported_SHA1(void){ return get_hwcap() & HWCAP_SHA1; } ++BoolInt CPU_IsSupported_SHA2(void) { return get_hwcap() & HWCAP_SHA2; } ++BoolInt CPU_IsSupported_AES(void) { return get_hwcap() & HWCAP_AES; } ++ ++#else // __FreeBSD__ ++ + #define USE_HWCAP + + #ifdef USE_HWCAP +@@ -410,6 +427,7 @@ MY_HWCAP_CHECK_FUNC (SHA1) + MY_HWCAP_CHECK_FUNC (SHA2) + MY_HWCAP_CHECK_FUNC (AES) + ++#endif // FreeBSD + #endif // __APPLE__ + #endif // _WIN32 + diff --git a/security/kc24/Makefile b/security/kc24/Makefile index 547c33c7566..857b0b1728f 100644 --- a/security/kc24/Makefile +++ b/security/kc24/Makefile @@ -2,7 +2,7 @@ PORTNAME= kc PORTVERSION= 2.4.2 -PORTREVISION= 
1 +PORTREVISION= 2 CATEGORIES= security PKGNAMESUFFIX= 24 diff --git a/security/lasso/Makefile b/security/lasso/Makefile index 40a8c4c9a1a..f99a39944c0 100644 --- a/security/lasso/Makefile +++ b/security/lasso/Makefile @@ -2,6 +2,7 @@ PORTNAME= lasso PORTVERSION= 2.8.0 +PORTREVISION= 1 CATEGORIES= security MASTER_SITES= https://dev.entrouvert.org/releases/lasso/ diff --git a/security/lastpass-cli/Makefile b/security/lastpass-cli/Makefile index dd6349220da..92cdecfd91d 100644 --- a/security/lastpass-cli/Makefile +++ b/security/lastpass-cli/Makefile @@ -2,6 +2,7 @@ PORTNAME= lastpass-cli PORTVERSION= 1.3.3 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= security diff --git a/security/libsecret/Makefile b/security/libsecret/Makefile index c797f716c12..13e11d65c33 100644 --- a/security/libsecret/Makefile +++ b/security/libsecret/Makefile @@ -2,6 +2,7 @@ PORTNAME= libsecret PORTVERSION= 0.20.4 +PORTREVISION= 1 CATEGORIES= security gnome MASTER_SITES= GNOME diff --git a/security/modsecurity3/Makefile b/security/modsecurity3/Makefile index 6cffad6e831..84e08d64af2 100644 --- a/security/modsecurity3/Makefile +++ b/security/modsecurity3/Makefile @@ -1,6 +1,7 @@ PORTNAME= modsecurity DISTVERSIONPREFIX= v DISTVERSION= 3.0.5 +PORTREVISION= 1 CATEGORIES= security www MASTER_SITES= https://github.com/SpiderLabs/ModSecurity/releases/download/v${PORTVERSION}/ PKGNAMESUFFIX= 3 diff --git a/security/oath-toolkit/Makefile b/security/oath-toolkit/Makefile index 01ca333592c..aab7439583c 100644 --- a/security/oath-toolkit/Makefile +++ b/security/oath-toolkit/Makefile @@ -2,7 +2,7 @@ PORTNAME= oath-toolkit PORTVERSION= 2.6.7 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= security MASTER_SITES= SAVANNAH diff --git a/security/pgpdump/Makefile b/security/pgpdump/Makefile index 3b68939c705..99fa1cc4af5 100644 --- a/security/pgpdump/Makefile +++ b/security/pgpdump/Makefile @@ -1,7 +1,7 @@ # Created by: TAKAHASHI Kaoru PORTNAME= pgpdump -PORTVERSION= 0.34 +PORTVERSION= 0.35 CATEGORIES= 
security MASTER_SITES= https://www.mew.org/~kazu/proj/pgpdump/ diff --git a/security/pgpdump/distinfo b/security/pgpdump/distinfo index d8bd3d21a3a..40c0279efe3 100644 --- a/security/pgpdump/distinfo +++ b/security/pgpdump/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641045818 -SHA256 (pgpdump-0.34.tar.gz) = 989da5e880952514de3bfc443bb8534d49cbbfbe51ed6fde7d7684d531f60a20 -SIZE (pgpdump-0.34.tar.gz) = 77508 +TIMESTAMP = 1647264442 +SHA256 (pgpdump-0.35.tar.gz) = 4e02922dbd6309f371d52d336eef8f4dc0cd75d5140d0a3a795ff10185c9544f +SIZE (pgpdump-0.35.tar.gz) = 77580 diff --git a/security/pwman/Makefile b/security/pwman/Makefile index ae0b722597e..68d5d245c5c 100644 --- a/security/pwman/Makefile +++ b/security/pwman/Makefile @@ -2,7 +2,7 @@ PORTNAME= pwman DISTVERSION= 0.4.5 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= security MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTNAME}-${PORTVERSION} diff --git a/security/py-RestrictedPython/files/patch-2to3 b/security/py-RestrictedPython/files/patch-2to3 new file mode 100644 index 00000000000..931d63b3254 --- /dev/null +++ b/security/py-RestrictedPython/files/patch-2to3 @@ -0,0 +1,469 @@ +--- src/RestrictedPython/Eval.py.orig 2010-07-09 04:28:54 UTC ++++ src/RestrictedPython/Eval.py +@@ -60,10 +60,10 @@ class RestrictionCapableEval: + self.expr, '') + if PROFILE: + end = clock() +- print 'prepRestrictedCode: %d ms for %s' % ( +- (end - start) * 1000, `self.expr`) ++ print('prepRestrictedCode: %d ms for %s' % ( ++ (end - start) * 1000, repr(self.expr))) + if err: +- raise SyntaxError, err[0] ++ raise SyntaxError(err[0]) + self.used = tuple(used.keys()) + self.rcode = co + +--- src/RestrictedPython/Guards.py.orig 2010-07-09 04:07:16 UTC ++++ src/RestrictedPython/Guards.py +@@ -98,7 +98,7 @@ def _write_wrapper(): + try: + f = getattr(self.ob, secattr) + except AttributeError: +- raise TypeError, error_msg ++ raise TypeError(error_msg) + f(*args) + return handler + class Wrapper: +--- src/RestrictedPython/Limits.py.orig 2010-07-07 
14:42:56 UTC ++++ src/RestrictedPython/Limits.py +@@ -25,22 +25,22 @@ def limited_range(iFirst, *args): + elif len(args) == 2: + iStart, iEnd, iStep = iFirst, args[0], args[1] + else: +- raise AttributeError, 'range() requires 1-3 int arguments' +- if iStep == 0: raise ValueError, 'zero step for range()' ++ raise AttributeError('range() requires 1-3 int arguments') ++ if iStep == 0: raise ValueError('zero step for range()') + iLen = int((iEnd - iStart) / iStep) + if iLen < 0: iLen = 0 +- if iLen >= RANGELIMIT: raise ValueError, 'range() too large' +- return range(iStart, iEnd, iStep) ++ if iLen >= RANGELIMIT: raise ValueError('range() too large') ++ return list(range(iStart, iEnd, iStep)) + limited_builtins['range'] = limited_range + + def limited_list(seq): + if isinstance(seq, str): +- raise TypeError, 'cannot convert string to list' ++ raise TypeError('cannot convert string to list') + return list(seq) + limited_builtins['list'] = limited_list + + def limited_tuple(seq): + if isinstance(seq, str): +- raise TypeError, 'cannot convert string to tuple' ++ raise TypeError('cannot convert string to tuple') + return tuple(seq) + limited_builtins['tuple'] = limited_tuple +--- src/RestrictedPython/RCompile.py.orig 2010-07-07 14:42:56 UTC ++++ src/RestrictedPython/RCompile.py +@@ -20,12 +20,12 @@ from compiler import ast, parse, misc, syntax, pycodeg + from compiler.pycodegen import AbstractCompileMode, Expression, \ + Interactive, Module, ModuleCodeGenerator, FunctionCodeGenerator, findOp + +-import MutatingWalker +-from RestrictionMutator import RestrictionMutator ++from . import MutatingWalker ++from .RestrictionMutator import RestrictionMutator + + + def niceParse(source, filename, mode): +- if isinstance(source, unicode): ++ if isinstance(source, str): + # Use the utf-8-sig BOM so the compiler + # detects this as a UTF-8 encoded string. 
+ source = '\xef\xbb\xbf' + source.encode('utf-8') +@@ -58,7 +58,7 @@ class RestrictedCompileMode(AbstractCompileMode): + tree = self.parse() + MutatingWalker.walk(tree, self.rm) + if self.rm.errors: +- raise SyntaxError, self.rm.errors[0] ++ raise SyntaxError(self.rm.errors[0]) + misc.set_filename(self.filename, tree) + syntax.check(tree) + return tree +@@ -72,7 +72,7 @@ class RestrictedCompileMode(AbstractCompileMode): + def compileAndTuplize(gen): + try: + gen.compile() +- except SyntaxError, v: ++ except SyntaxError as v: + return None, (str(v),), gen.rm.warnings, gen.rm.used_names + return gen.getCode(), (), gen.rm.warnings, gen.rm.used_names + +--- src/RestrictedPython/tests/before_and_after.py.orig 2010-07-09 04:29:10 UTC ++++ src/RestrictedPython/tests/before_and_after.py +@@ -77,11 +77,11 @@ def nested_list_comprehension_after(): + # print + + def simple_print_before(): +- print "foo" ++ print("foo") + + def simple_print_after(): + _print = _print_() +- print >> _print, "foo" ++ print("foo", file=_print) + + # getitem + +@@ -117,13 +117,13 @@ def simple_delitem_after(): + + def function_with_print_before(): + def foo(): +- print "foo" ++ print("foo") + return printed + + def function_with_print_after(): + def foo(): + _print = _print_() +- print >> _print, "foo" ++ print("foo", file=_print) + return _print() + + def function_with_getattr_before(): +--- src/RestrictedPython/tests/class.py.orig 2010-07-07 14:42:56 UTC ++++ src/RestrictedPython/tests/class.py +@@ -10,4 +10,4 @@ x = MyClass() + x.set(12) + x.set(x.get() + 1) + if x.get() != 13: +- raise AssertionError, "expected 13, got %d" % x.get() ++ raise AssertionError("expected 13, got %d" % x.get()) +--- src/RestrictedPython/tests/restricted_module.py.orig 2010-07-07 14:42:56 UTC ++++ src/RestrictedPython/tests/restricted_module.py +@@ -1,42 +1,43 @@ + import sys ++from functools import reduce + + def print0(): +- print 'Hello, world!', ++ print('Hello, world!', end=' ') + return printed + + def 
print1(): +- print 'Hello,', +- print 'world!', ++ print('Hello,', end=' ') ++ print('world!', end=' ') + return printed + + def printStuff(): +- print 'a', 'b', 'c', ++ print('a', 'b', 'c', end=' ') + return printed + + def printToNone(): + x = None +- print >>x, 'Hello, world!', ++ print('Hello, world!', end=' ', file=x) + return printed + + def printLines(): + # This failed before Zope 2.4.0a2 +- r = range(3) ++ r = list(range(3)) + for n in r: + for m in r: +- print m + n * len(r), +- print ++ print(m + n * len(r), end=' ') ++ print() + return printed + + def try_map(): + inc = lambda i: i+1 + x = [1, 2, 3] +- print map(inc, x), ++ print(list(map(inc, x)), end=' ') + return printed + + def try_apply(): + def f(x, y, z): + return x + y + z +- print f(*(300, 20), **{'z': 1}), ++ print(f(*(300, 20), **{'z': 1}), end=' ') + return printed + + def try_inplace(): +@@ -45,17 +46,17 @@ def try_inplace(): + + def primes(): + # Somewhat obfuscated code on purpose +- print filter(None,map(lambda y:y*reduce(lambda x,y:x*y!=0, +- map(lambda x,y=y:y%x,range(2,int(pow(y,0.5)+1))),1),range(2,20))), ++ print([_f for _f in [y*reduce(lambda x,y:x*y!=0, ++ list(map(lambda x,y=y:y%x,list(range(2,int(pow(y,0.5)+1))))),1) for y in range(2,20)] if _f], end=' ') + return printed + + def allowed_read(ob): +- print ob.allowed +- print ob.s +- print ob[0] +- print ob[2] +- print ob[3:-1] +- print len(ob) ++ print(ob.allowed) ++ print(ob.s) ++ print(ob[0]) ++ print(ob[2]) ++ print(ob[3:-1]) ++ print(len(ob)) + return printed + + def allowed_default_args(ob): +@@ -83,13 +84,13 @@ def allowed_simple(): + def allowed_write(ob): + ob.writeable = 1 + #ob.writeable += 1 +- [1 for ob.writeable in 1,2] ++ [1 for ob.writeable in [1,2]] + ob['safe'] = 2 + #ob['safe'] += 2 +- [1 for ob['safe'] in 1,2] ++ [1 for ob['safe'] in [1,2]] + + def denied_print(ob): +- print >> ob, 'Hello, world!', ++ print('Hello, world!', end=' ', file=ob) + + def denied_getattr(ob): + #ob.disallowed += 1 +@@ -108,7 +109,7 
@@ def denied_setattr2(ob): + ob.allowed = -1 + + def denied_setattr3(ob): +- [1 for ob.allowed in 1,2] ++ [1 for ob.allowed in [1,2]] + + def denied_getitem(ob): + ob[1] +@@ -125,7 +126,7 @@ def denied_setitem2(ob): + ob['x'] = 2 + + def denied_setitem3(ob): +- [1 for ob['x'] in 1,2] ++ [1 for ob['x'] in [1,2]] + + def denied_setslice(ob): + ob[0:1] = 'a' +@@ -135,7 +136,7 @@ def denied_setslice2(ob): + ob[0:1] = 'a' + + def denied_setslice3(ob): +- [1 for ob[0:1] in 1,2] ++ [1 for ob[0:1] in [1,2]] + + ##def strange_attribute(): + ## # If a guard has attributes with names that don't start with an +--- src/RestrictedPython/tests/security_in_syntax.py.orig 2010-07-09 04:07:16 UTC ++++ src/RestrictedPython/tests/security_in_syntax.py +@@ -29,7 +29,7 @@ def bad_attr(): + some_ob._some_attr = 15 + + def no_exec(): +- exec 'q = 1' ++ exec('q = 1') + + def no_yield(): + yield 42 +@@ -47,7 +47,7 @@ def from_import_as_bad_name(): + def except_using_bad_name(): + try: + foo +- except NameError, _leading_underscore: ++ except NameError as _leading_underscore: + # The name of choice (say, _write) is now assigned to an exception + # object. Hard to exploit, but conceivable. + pass +--- src/RestrictedPython/tests/testRestrictions.py.orig 2010-07-09 04:07:16 UTC ++++ src/RestrictedPython/tests/testRestrictions.py +@@ -52,10 +52,10 @@ def find_source(fn, func): + + def get_source(func): + """Less silly interface to find_source""" +- file = func.func_globals['__file__'] ++ file = func.__globals__['__file__'] + if file.endswith('.pyc'): + file = file[:-1] +- source = find_source(file, func.func_code)[1] ++ source = find_source(file, func.__code__)[1] + assert source.strip(), "Source should not be empty!" 
+ return source + +@@ -76,7 +76,7 @@ def create_rmodule(): + 'len', 'chr', 'ord', + ): + rmodule[name] = builtins[name] +- exec code in rmodule ++ exec(code, rmodule) + + class AccessDenied (Exception): pass + +@@ -145,7 +145,7 @@ def guarded_getitem(ob, index): + + def minimal_import(name, _globals, _locals, names): + if name != "__future__": +- raise ValueError, "Only future imports are allowed" ++ raise ValueError("Only future imports are allowed") + import __future__ + return __future__ + +@@ -185,14 +185,14 @@ def inplacevar_wrapper(op, x, y): + inplacevar_wrapper_called[op] = x, y + # This is really lame. But it's just a test. :) + globs = {'x': x, 'y': y} +- exec 'x'+op+'y' in globs ++ exec('x'+op+'y', globs) + return globs['x'] + + class RestrictionTests(unittest.TestCase): + def execFunc(self, name, *args, **kw): + func = rmodule[name] +- verify.verify(func.func_code) +- func.func_globals.update({'_getattr_': guarded_getattr, ++ verify.verify(func.__code__) ++ func.__globals__.update({'_getattr_': guarded_getattr, + '_getitem_': guarded_getitem, + '_write_': TestGuard, + '_print_': PrintCollector, +@@ -263,7 +263,7 @@ class RestrictionTests(unittest.TestCase): + self.assertEqual(inplacevar_wrapper_called['+='], (1, 3)) + + def checkDenied(self): +- for k in rmodule.keys(): ++ for k in list(rmodule.keys()): + if k[:6] == 'denied': + try: + self.execFunc(k, RestrictedObject()) +@@ -290,10 +290,10 @@ class RestrictionTests(unittest.TestCase): + # Unrestricted compile. 
+ code = compile(source, fn, 'exec') + m = {'__builtins__': {'__import__':minimal_import}} +- exec code in m +- for k, v in m.items(): ++ exec(code, m) ++ for k, v in list(m.items()): + if hasattr(v, 'func_code'): +- filename, source = find_source(fn, v.func_code) ++ filename, source = find_source(fn, v.__code__) + # Now compile it with restrictions + try: + code = compile_restricted(source, filename, 'exec') +@@ -327,11 +327,11 @@ class RestrictionTests(unittest.TestCase): + self.assertEqual(res, expect) + + def checkStackSize(self): +- for k, rfunc in rmodule.items(): ++ for k, rfunc in list(rmodule.items()): + if not k.startswith('_') and hasattr(rfunc, 'func_code'): +- rss = rfunc.func_code.co_stacksize +- ss = getattr(restricted_module, k).func_code.co_stacksize +- self.failUnless( ++ rss = rfunc.__code__.co_stacksize ++ ss = getattr(restricted_module, k).__code__.co_stacksize ++ self.assertTrue( + rss >= ss, 'The stack size estimate for %s() ' + 'should have been at least %d, but was only %d' + % (k, ss, rss)) +@@ -427,7 +427,7 @@ class RestrictionTests(unittest.TestCase): + calls.append(seq) + return list(seq) + globals = {"_getiter_": getiter, '_inplacevar_': inplacevar_wrapper} +- exec co in globals, {} ++ exec(co, globals, {}) + # The comparison here depends on the exact code that is + # contained in unpack.py. 
+ # The test doing implicit unpacking in an "except:" clause is +@@ -454,7 +454,7 @@ class RestrictionTests(unittest.TestCase): + [[[3, 4]]], [[3, 4]], [3, 4], + ] + i = expected.index(ineffable) +- self.assert_(isinstance(calls[i], TypeError)) ++ self.assertTrue(isinstance(calls[i], TypeError)) + expected[i] = calls[i] + self.assertEqual(calls, expected) + +@@ -466,7 +466,7 @@ class RestrictionTests(unittest.TestCase): + calls.append(s) + return list(s) + globals = {"_getiter_": getiter} +- exec co in globals, {} ++ exec(co, globals, {}) + self.assertEqual(calls, [[(1,2)], (1, 2)]) + + def checkUnpackSequenceSingle(self): +@@ -477,7 +477,7 @@ class RestrictionTests(unittest.TestCase): + calls.append(s) + return list(s) + globals = {"_getiter_": getiter} +- exec co in globals, {} ++ exec(co, globals, {}) + self.assertEqual(calls, [(1, 2)]) + + def checkClass(self): +@@ -496,7 +496,7 @@ class RestrictionTests(unittest.TestCase): + globals = {"_getattr_": test_getattr, + "_write_": test_setattr, + } +- exec co in globals, {} ++ exec(co, globals, {}) + # Note that the getattr calls don't correspond to the method call + # order, because the x.set method is fetched before its arguments + # are evaluated. 
+@@ -506,7 +506,7 @@ class RestrictionTests(unittest.TestCase): + + def checkLambda(self): + co = self._compile_file("lambda.py") +- exec co in {}, {} ++ exec(co, {}, {}) + + def checkEmpty(self): + rf = RFunction("", "", "issue945", "empty.py", {}) +--- src/RestrictedPython/tests/unpack.py.orig 2010-07-07 14:42:56 UTC ++++ src/RestrictedPython/tests/unpack.py +@@ -20,7 +20,7 @@ try: + except ValueError: + pass + else: +- raise AssertionError, "expected 'unpack list of wrong size'" ++ raise AssertionError("expected 'unpack list of wrong size'") + + def u2(L): + x, (a, b), y = L +@@ -37,9 +37,10 @@ try: + except TypeError: + pass + else: +- raise AssertionError, "expected 'iteration over non-sequence'" ++ raise AssertionError("expected 'iteration over non-sequence'") + +-def u3((x, y)): ++def u3(xxx_todo_changeme): ++ (x, y) = xxx_todo_changeme + assert x == 'a' + assert y == 'b' + return x, y +@@ -58,7 +59,8 @@ def u5(x): + raise TypeError(x) + # This one is tricky to test, because the first level of unpacking + # has a TypeError instance. That's a headache for the test driver. 
+- except TypeError, [(a, b)]: ++ except TypeError as xxx_todo_changeme1: ++ [(a, b)] = xxx_todo_changeme1.args + assert a == 42 + assert b == 666 + +--- src/RestrictedPython/tests/verify.py.orig 2010-07-07 14:42:56 UTC ++++ src/RestrictedPython/tests/verify.py +@@ -83,7 +83,7 @@ def _verifycode(code): + window[2].arg == "_write_"): + # check that arg is appropriately wrapped + for i, op in enumerate(window): +- print i, op.opname, op.arg ++ print(i, op.opname, op.arg) + raise ValueError("unguard attribute set/del at %s:%d" + % (code.co_filename, line)) + if op.opname.startswith("UNPACK"): diff --git a/security/py-cerealizer/files/patch-2to3 b/security/py-cerealizer/files/patch-2to3 new file mode 100644 index 00000000000..e6dc7b618ea --- /dev/null +++ b/security/py-cerealizer/files/patch-2to3 @@ -0,0 +1,98 @@ +--- __init__.py.orig 2008-06-29 21:52:17 UTC ++++ __init__.py +@@ -123,12 +123,12 @@ import logging + logger = logging.getLogger("cerealizer") + #logging.basicConfig(level=logging.INFO) + +-from cStringIO import StringIO ++from io import StringIO + from new import instance + +-class EndOfFile(StandardError): pass +-class NotCerealizerFileError(StandardError): pass +-class NonCerealizableObjectError(StandardError): pass ++class EndOfFile(Exception): pass ++class NotCerealizerFileError(Exception): pass ++class NonCerealizableObjectError(Exception): pass + + def _priority_sorter(a, b): return cmp(a[0], b[0]) + +@@ -206,7 +206,7 @@ Reads a reference from file S.""" + elif c == "r": return self.id2obj[int(s.readline())] + elif c == "n": return None + elif c == "b": return bool(int(s.read(1))) +- elif c == "l": return long(s.readline()) ++ elif c == "l": return int(s.readline()) + elif c == "c": return complex(s.readline()) + raise ValueError("Unknown ref code '%s'!" 
% c) + +@@ -357,13 +357,13 @@ class DictHandler(Handler): + classname = "dict\n" + def collect(self, obj, dumper): + if Handler.collect(self, obj, dumper): +- for i in obj.iterkeys (): dumper.collect(i) # Collect is not ordered +- for i in obj.itervalues(): dumper.collect(i) ++ for i in obj.keys (): dumper.collect(i) # Collect is not ordered ++ for i in obj.values(): dumper.collect(i) + return 1 + + def dump_data(self, obj, dumper, s): + s.write("%s\n" % len(obj)) +- for k, v in obj.iteritems(): ++ for k, v in obj.items(): + _HANDLERS_[v.__class__].dump_ref(v, dumper, s) # Value is saved fist + _HANDLERS_[k.__class__].dump_ref(k, dumper, s) + +@@ -509,16 +509,16 @@ have to write a custom Handler or a __getstate__ and _ + + CLASSNAME is the classname used in Cerealizer files. It defaults to the full classname (module.class) + but you may choose something shorter -- as long as there is no risk of name clash.""" +- if not _configurable: raise StandardError("Cannot register new classes after freeze_configuration has been called!") ++ if not _configurable: raise Exception("Cannot register new classes after freeze_configuration has been called!") + if "\n" in classname: raise ValueError("CLASSNAME cannot have \\n (Cerealizer automatically add a trailing \\n for performance reason)!") + if not handler: + if hasattr(Class, "__getnewargs__" ): handler = NewArgsObjHandler (Class, classname) + elif hasattr(Class, "__getinitargs__"): handler = InitArgsObjHandler(Class, classname) + elif hasattr(Class, "__slots__" ): handler = SlotedObjHandler (Class, classname) + else: handler = ObjHandler (Class, classname) +- if _HANDLERS_.has_key(Class): raise ValueError("Class %s has already been registred!" % Class) ++ if Class in _HANDLERS_: raise ValueError("Class %s has already been registred!" % Class) + if not isinstance(handler, RefHandler): +- if _HANDLERS .has_key(handler.classname): raise ValueError("A class has already been registred under the name %s!" 
% handler.classname[:-1]) ++ if handler.classname in _HANDLERS: raise ValueError("A class has already been registred under the name %s!" % handler.classname[:-1]) + _HANDLERS [handler.classname] = handler + if handler.__class__ is ObjHandler: + logger.info("Registring class %s as '%s'" % (Class, handler.classname[:-1])) +@@ -544,7 +544,7 @@ and you'll be able to open old files containing OldCla + handler = _HANDLERS_.get(Class) + if not handler: + raise ValueError("Cannot register alias '%s' to Class %s: the class is not yet registred!" % (alias, Class)) +- if _HANDLERS.has_key(alias): ++ if alias in _HANDLERS: + raise ValueError("Cannot register alias '%s' to Class %s: another class is already registred under the alias name!" % (alias, Class)) + logger.info("Registring alias '%s' for %s" % (alias, Class)) + _HANDLERS[alias + "\n"] = handler +@@ -563,10 +563,10 @@ unexpected calls to register().""" + + register(type(None), NoneHandler ()) + register(str , StrHandler ()) +-register(unicode , UnicodeHandler ()) ++register(str , UnicodeHandler ()) + register(bool , BoolHandler ()) + register(int , IntHandler ()) +-register(long , LongHandler ()) ++register(int , LongHandler ()) + register(float , FloatHandler ()) + register(complex , ComplexHandler ()) + register(dict , DictHandler ()) +@@ -613,8 +613,8 @@ def dump_class_of_module(*modules): + Utility function; for each classes found in the given module, print the needed call to register.""" + class D: pass + class O(object): pass +- s = set([c for module in modules for c in module.__dict__.values() if isinstance(c, type(D)) or isinstance(c, type(O))]) ++ s = set([c for module in modules for c in list(module.__dict__.values()) if isinstance(c, type(D)) or isinstance(c, type(O))]) + l = ['cerealizer.register(%s.%s)' % (c.__module__, c.__name__) for c in s] + l.sort() +- for i in l: print i ++ for i in l: print(i) + diff --git a/security/py-ecdsa/Makefile b/security/py-ecdsa/Makefile index 40978ba11e8..cac6d1314c0 
100644 --- a/security/py-ecdsa/Makefile +++ b/security/py-ecdsa/Makefile @@ -1,7 +1,7 @@ # Created by: Neil Booth PORTNAME= ecdsa -PORTVERSION= 0.16.0 +PORTVERSION= 0.17.0 CATEGORIES= security python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,25 +13,24 @@ LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}six>=1.9.0:devel/py-six@${PY_FLAVOR} -TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>0:devel/py-pytest@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}hypothesis>0:devel/py-hypothesis@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}hypothesis>=0:devel/py-hypothesis@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pytest>=0:devel/py-pytest@${PY_FLAVOR} USES= python:3.6+ -USE_PYTHON= distutils autoplist +USE_PYTHON= autoplist concurrent distutils -OPTIONS_RADIO= FASTER +NO_ARCH= yes + +OPTIONS_RADIO= FASTER OPTIONS_RADIO_FASTER= GMPY GMPY2 -OPTIONS_DEFAULT= GMPY2 - +OPTIONS_DEFAULT=GMPY2 GMPY_DESC= GMPY support GMPY2_DESC= GMPY2 support GMPY_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}gmpy>=0:math/py-gmpy@${PY_FLAVOR} GMPY2_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}gmpy2>=0:math/py-gmpy2@${PY_FLAVOR} -NO_ARCH= yes - do-test: - @(cd ${TEST_WRKSRC} && ${SETENV} ${TEST_ENV} ${PYTHON_CMD} -m pytest) + cd ${WRKSRC} && ${PYTHON_CMD} -m pytest -rs -v .include diff --git a/security/py-ecdsa/distinfo b/security/py-ecdsa/distinfo index ba990db361a..37703e06405 100644 --- a/security/py-ecdsa/distinfo +++ b/security/py-ecdsa/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1604999391 -SHA256 (ecdsa-0.16.0.tar.gz) = 494c6a853e9ed2e9be33d160b41d47afc50a6629b993d2b9c5ad7bb226add892 -SIZE (ecdsa-0.16.0.tar.gz) = 125851 +TIMESTAMP = 1647264666 +SHA256 (ecdsa-0.17.0.tar.gz) = b9f500bb439e4153d0330610f5d26baaf18d17b8ced1bc54410d189385ea68aa +SIZE (ecdsa-0.17.0.tar.gz) = 158486 diff --git a/security/py-ecdsa/pkg-descr b/security/py-ecdsa/pkg-descr index 75d2c28950a..766dd192ddd 100644 --- a/security/py-ecdsa/pkg-descr +++ b/security/py-ecdsa/pkg-descr @@ -1,18 +1,24 @@ 
-This is an easy-to-use implementation of ECDSA cryptography (Elliptic Curve -Digital Signature Algorithm), implemented purely in Python, released under -the MIT license. With this library, you can quickly create keypairs (signing -key and verifying key), sign messages, and verify the signatures. The keys -and signatures are very short, making them easy to handle and incorporate -into other protocols. +This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography) with +support for ECDSA (Elliptic Curve Digital Signature Algorithm) and ECDH +(Elliptic Curve Diffie-Hellman), implemented purely in Python, released under +the MIT license. With this library, you can quickly create key pairs (signing +key and verifying key), sign messages, and verify the signatures. You can also +agree on a shared secret key based on exchanged public keys. The keys and +signatures are very short, making them easy to handle and incorporate into other +protocols. -Features +This library provides key generation, signing, verifying, and shared secret +derivation for five popular NIST "Suite B" GF(p) (prime field) curves, with key +lengths of 192, 224, 256, 384, and 521 bits. The "short names" for these curves, +as known by the OpenSSL tool (openssl ecparam -list_curves), are: prime192v1, +secp224r1, prime256v1, secp384r1, and secp521r1. It includes the 256-bit curve +secp256k1 used by Bitcoin. There is also support for the regular (non-twisted) +variants of Brainpool curves from 160 to 512 bits. The "short names" of those +curves are: brainpoolP160r1, brainpoolP192r1, brainpoolP224r1, brainpoolP256r1, +brainpoolP320r1, brainpoolP384r1, brainpoolP512r1. Few of the small curves from +SEC standard are also included (mainly to speed-up testing of the library), +those are: secp112r1, secp112r2, secp128r1, and secp160r1. No other curves are +included, but it is not too hard to add support for more curves over prime +fields. 
-This library provides key generation, signing, and verifying, for five -popular NIST "Suite B" GF(p) curves, with key lengths of 192, 224, 256, 384, -and 521 bits. The "short names" for these curves, as known by the OpenSSL -tool (`openssl ecparam --list_curves`), are: prime192v1, secp224r1, -prime256v1, secp384r1, and secp521r1. It also includes the 256-bit curve used -by Bitcoin, whose short name is secp256k1. No other curves are included, but -it would not be too hard to add more. - -WWW: https://github.com/warner/python-ecdsa +WWW: https://github.com/tlsfuzzer/python-ecdsa diff --git a/security/py-gnutls/files/patch-2to3 b/security/py-gnutls/files/patch-2to3 new file mode 100644 index 00000000000..ab5564904ae --- /dev/null +++ b/security/py-gnutls/files/patch-2to3 @@ -0,0 +1,201 @@ +--- gnutls/connection.py.orig 2017-01-26 09:15:54 UTC ++++ gnutls/connection.py +@@ -388,7 +388,7 @@ class ServerSession(Session): + data_length = c_size_t(256) + data = create_string_buffer(data_length.value) + hostname_type = c_uint() +- for i in xrange(2**16): ++ for i in range(2**16): + try: + gnutls_server_name_get(self._c_object, data, byref(data_length), byref(hostname_type), i) + except RequestedDataNotAvailable: +@@ -407,7 +407,7 @@ class ServerSessionFactory(object): + + def __init__(self, socket, context, session_class=ServerSession): + if not issubclass(session_class, ServerSession): +- raise TypeError, "session_class must be a subclass of ServerSession" ++ raise TypeError("session_class must be a subclass of ServerSession") + self.socket = socket + self.context = context + self.session_class = session_class +--- gnutls/constants.py.orig 2016-03-08 13:28:28 UTC ++++ gnutls/constants.py +@@ -31,7 +31,7 @@ class GNUTLSConstant(int): + + ## Generate all exported constants + code = '\n'.join(["%s = GNUTLSConstant('%s')" % (name, name) for name in __all__]) +-exec code in locals(), globals() ++exec(code, locals(), globals()) + del code, name + + del constants +--- 
gnutls/interfaces/twisted/__init__.py.orig 2016-03-08 13:28:28 UTC ++++ gnutls/interfaces/twisted/__init__.py +@@ -41,7 +41,7 @@ class RecurrentCall(object): + self.now, self.next = self.next, self.next + self.period + result = self.func(*self.args, **self.kwargs) + if result is KeepRunning: +- delay = max(self.next-time(), 0) ++ delay = max(self.__next__-time(), 0) + self.callid = reactor.callLater(delay, self) + def cancel(self): + if self.callid is not None: +@@ -77,7 +77,7 @@ class TLSMixin: + return tcp.Connection.doRead(self) + except (OperationWouldBlock, OperationInterrupted): + return +- except GNUTLSError, e: ++ except GNUTLSError as e: + return e + + def writeSomeData(self, data): +@@ -87,7 +87,7 @@ class TLSMixin: + return self.writeSomeData(data) + except OperationWouldBlock: + return 0 +- except GNUTLSError, e: ++ except GNUTLSError as e: + return e + + def _sendCloseReason(self, reason): +@@ -117,11 +117,11 @@ class TLSMixin: + self.stopWriting() + try: + self._sendCloseAlert(SHUT_WR) +- except OperationWouldBlock, e: ++ except OperationWouldBlock as e: + if self.socket.interrupted_while_writing: + self.startWriting() + return +- except Exception, e: ++ except Exception as e: + return e + del self.doWrite + +@@ -153,7 +153,7 @@ class TLSClient(TLSMixin, tcp.Client): + return + try: + self.context.credentials.verify_callback(self.socket.peer_certificate) +- except Exception, e: ++ except Exception as e: + self.loseConnection(e) + return + else: +@@ -166,7 +166,7 @@ class TLSClient(TLSMixin, tcp.Client): + return + try: + session.verify_peer() +- except Exception, e: ++ except Exception as e: + preverify_status = e + else: + preverify_status = CertificateOK +@@ -184,7 +184,7 @@ class TLSClient(TLSMixin, tcp.Client): + if self.socket.interrupted_while_writing: + self.startWriting() + return +- except GNUTLSError, e: ++ except GNUTLSError as e: + del self.doRead + self.failIfNotConnected(err = e) + return +@@ -195,11 +195,11 @@ class TLSClient(TLSMixin, 
tcp.Client): + + try: + self._verifyPeer() +- except GNUTLSError, e: ++ except GNUTLSError as e: + self.closeTLSSession(e) + self.failIfNotConnected(err = e) + return +- except Exception, e: ++ except Exception as e: + self.closeTLSSession(e) + self.failIfNotConnected(err = error.getConnectError(str(e))) + return +@@ -258,7 +258,7 @@ class TLSServer(TLSMixin, tcp.Server): + return + try: + self.context.credentials.verify_callback(self.socket.peer_certificate) +- except Exception, e: ++ except Exception as e: + self.loseConnection(e) + return + else: +@@ -271,7 +271,7 @@ class TLSServer(TLSMixin, tcp.Server): + return + try: + session.verify_peer() +- except Exception, e: ++ except Exception as e: + preverify_status = e + else: + preverify_status = CertificateOK +@@ -289,7 +289,7 @@ class TLSServer(TLSMixin, tcp.Server): + if self.socket.interrupted_while_writing: + self.startWriting() + return +- except GNUTLSError, e: ++ except GNUTLSError as e: + del self.doRead + return e + +@@ -300,7 +300,7 @@ class TLSServer(TLSMixin, tcp.Server): + + try: + self._verifyPeer() +- except Exception, e: ++ except Exception as e: + self.loseConnection(e) + return + +--- gnutls/validators.py.orig 2016-03-08 13:28:28 UTC ++++ gnutls/validators.py +@@ -76,7 +76,7 @@ class TypeValidator(Validator): + class MultiTypeValidator(TypeValidator): + @staticmethod + def can_validate(obj): +- return isinstance(obj, tuple) and not filter(lambda x: not isclass(x), obj) ++ return isinstance(obj, tuple) and not [x for x in obj if not isclass(x)] + + class OneOfValidator(Validator): + def __init__(self, typ): +@@ -94,7 +94,7 @@ class ListOfValidator(Validator): + def __init__(self, typ): + self.type = typ.type + def check(self, value): +- return isinstance(value, (tuple, list)) and not filter(lambda x: not isinstance(x, self.type), value) ++ return isinstance(value, (tuple, list)) and not [x for x in value if not isinstance(x, self.type)] + @staticmethod + def can_validate(obj): + return 
isinstance(obj, list_of) +@@ -109,7 +109,7 @@ class ComplexValidator(Validator): + return bool(sum(t.check(value) for t in self.type)) + @staticmethod + def can_validate(obj): +- return isinstance(obj, tuple) and not filter(lambda x: Validator.get(x) is None, obj) ++ return isinstance(obj, tuple) and not [x for x in obj if Validator.get(x) is None] + @property + def name(self): + return self.join_names([x.name for x in self.type]) +@@ -135,7 +135,7 @@ class one_of(object): + + class list_of(object): + def __init__(self, *args): +- if filter(lambda x: not isclass(x), args): ++ if [x for x in args if not isclass(x)]: + raise TypeError("list_of arguments must be types") + if len(args) == 1: + self.type = args[0] +@@ -163,9 +163,9 @@ def preserve_signature(func): + if constants: + ## import the required GNUTLSConstants used as function default arguments + code = "from gnutls.constants import %s\n" % ', '.join(c.name for c in constants) +- exec code in locals(), locals() ++ exec(code, locals(), locals()) + code = "def %s(%s): return wrapper(%s)\nnew_wrapper = %s\n" % (func.__name__, signature, parameters, func.__name__) +- exec code in locals(), locals() ++ exec(code, locals(), locals()) + new_wrapper.__name__ = func.__name__ + new_wrapper.__doc__ = func.__doc__ + new_wrapper.__module__ = func.__module__ diff --git a/security/py-pyaxo/files/patch-2to3 b/security/py-pyaxo/files/patch-2to3 new file mode 100644 index 00000000000..5cfdb57c6c4 --- /dev/null +++ b/security/py-pyaxo/files/patch-2to3 @@ -0,0 +1,173 @@ +--- pyaxo.py.orig 2017-09-15 21:50:29 UTC ++++ pyaxo.py +@@ -124,18 +124,18 @@ class Axolotl(object): + def initState(self, other_name, other_identityKey, other_handshakeKey, + other_ratchetKey, verify=True): + if verify: +- print 'Confirm ' + other_name + ' has identity key fingerprint:\n' ++ print('Confirm ' + other_name + ' has identity key fingerprint:\n') + fingerprint = hash_(other_identityKey).encode('hex').upper() + fprint = '' + for i in range(0, 
len(fingerprint), 4): + fprint += fingerprint[i:i+2] + ':' +- print fprint[:-1] + '\n' +- print 'Be sure to verify this fingerprint with ' + other_name + \ +- ' by some out-of-band method!' +- print 'Otherwise, you may be subject to a Man-in-the-middle attack!\n' +- ans = raw_input('Confirm? y/N: ').strip() ++ print(fprint[:-1] + '\n') ++ print('Be sure to verify this fingerprint with ' + other_name + \ ++ ' by some out-of-band method!') ++ print('Otherwise, you may be subject to a Man-in-the-middle attack!\n') ++ ans = input('Confirm? y/N: ').strip() + if ans != 'y': +- print 'Key fingerprint not confirmed - exiting...' ++ print('Key fingerprint not confirmed - exiting...') + sys.exit() + + self.conversation = self.init_conversation(other_name, +@@ -386,7 +386,7 @@ class AxolotlConversation: + def _try_skipped_mk(self, msg, pad_length): + msg1 = msg[:HEADER_LEN-pad_length] + msg2 = msg[HEADER_LEN:] +- for skipped_mk in self.staged_hk_mk.values(): ++ for skipped_mk in list(self.staged_hk_mk.values()): + try: + decrypt_symmetric(skipped_mk.hk, msg1) + body = decrypt_symmetric(skipped_mk.mk, msg2) +@@ -456,7 +456,7 @@ class AxolotlConversation: + try: + body = decrypt_symmetric(mk, msg[HEADER_LEN:]) + except CryptoError: +- print 'Undecipherable message' ++ print('Undecipherable message') + sys.exit(1) + else: + try: +@@ -464,7 +464,7 @@ class AxolotlConversation: + except CryptoError: + pass + if self.ratchet_flag or not header or header == '': +- print 'Undecipherable message' ++ print('Undecipherable message') + sys.exit(1) + Np = struct.unpack('>I', header[:HEADER_COUNT_NUM_LEN])[0] + PNp = struct.unpack('>I', header[HEADER_COUNT_NUM_LEN:HEADER_COUNT_NUM_LEN*2])[0] +@@ -481,7 +481,7 @@ class AxolotlConversation: + except CryptoError: + pass + if not body or body == '': +- print 'Undecipherable message' ++ print('Undecipherable message') + sys.exit(1) + self.keys['RK'] = RKp + self.keys['HKr'] = HKp +@@ -499,7 +499,7 @@ class AxolotlConversation: + plaintext = 
f.read() + ciphertext = b2a(self.encrypt(plaintext)) + '\n' + with open(filename+'.asc', 'w') as f: +- lines = [ciphertext[i:i+64] for i in xrange(0, len(ciphertext), 64)] ++ lines = [ciphertext[i:i+64] for i in range(0, len(ciphertext), 64)] + for line in lines: + f.write(line+'\n') + +@@ -507,7 +507,7 @@ class AxolotlConversation: + with open(filename, 'r') as f: + ciphertext = a2b(f.read()) + plaintext = self.decrypt(ciphertext) +- print plaintext ++ print(plaintext) + + def encrypt_pipe(self): + plaintext = sys.stdin.read() +@@ -528,46 +528,46 @@ class AxolotlConversation: + self._axolotl.delete_conversation(self) + + def print_keys(self): +- print 'Your Identity key is:\n' + b2a(self.keys['DHIs']) + '\n' ++ print('Your Identity key is:\n' + b2a(self.keys['DHIs']) + '\n') + fingerprint = hash_(self.keys['DHIs']).encode('hex').upper() + fprint = '' + for i in range(0, len(fingerprint), 4): + fprint += fingerprint[i:i+2] + ':' +- print 'Your identity key fingerprint is: ' +- print fprint[:-1] + '\n' +- print 'Your Ratchet key is:\n' + b2a(self.keys['DHRs']) + '\n' ++ print('Your identity key fingerprint is: ') ++ print(fprint[:-1] + '\n') ++ print('Your Ratchet key is:\n' + b2a(self.keys['DHRs']) + '\n') + if self.handshake_key: +- print 'Your Handshake key is:\n' + b2a(self.handshake_pkey) ++ print('Your Handshake key is:\n' + b2a(self.handshake_pkey)) + else: +- print 'Your Handshake key is not available' ++ print('Your Handshake key is not available') + + def print_state(self): +- print +- print 'Warning: saving this data to disk is insecure!' 
+- print ++ print() ++ print('Warning: saving this data to disk is insecure!') ++ print() + for key in sorted(self.keys): + if 'priv' in key: + pass + else: + if self.keys[key] is None: +- print key + ': None' ++ print(key + ': None') + elif type(self.keys[key]) is bool: + if self.keys[key]: +- print key + ': True' ++ print(key + ': True') + else: +- print key + ': False' ++ print(key + ': False') + elif type(self.keys[key]) is str: + try: + self.keys[key].decode('ascii') +- print key + ': ' + self.keys[key] ++ print(key + ': ' + self.keys[key]) + except UnicodeDecodeError: +- print key + ': ' + b2a(self.keys[key]) ++ print(key + ': ' + b2a(self.keys[key])) + else: +- print key + ': ' + str(self.keys[key]) ++ print(key + ': ' + str(self.keys[key])) + if self.mode is ALICE_MODE: +- print 'Mode: Alice' ++ print('Mode: Alice') + else: +- print 'Mode: Bob' ++ print('Mode: Bob') + + + class SkippedMessageKey: +@@ -601,7 +601,7 @@ class SqlitePersistence(object): + sql = decrypt_symmetric(self.dbpassphrase, + crypt_sql) + except CryptoError: +- print 'Bad passphrase!' ++ print('Bad passphrase!') + sys.exit(1) + else: + db.cursor().executescript(sql) +@@ -611,7 +611,7 @@ class SqlitePersistence(object): + try: + db.cursor().executescript(sql) + except sqlite3.OperationalError: +- print 'Bad sql! Password problem - cannot create the database.' ++ print('Bad sql! 
Password problem - cannot create the database.') + sys.exit(1) + except IOError as e: + if e.errno == errno.ENOENT: +@@ -687,7 +687,7 @@ class SqlitePersistence(object): + to_identity = ?''', ( + conversation.name, + conversation.other_name)) +- for skipped_mk in conversation.staged_hk_mk.values(): ++ for skipped_mk in list(conversation.staged_hk_mk.values()): + db.execute(''' + INSERT INTO + skipped_mk ( diff --git a/security/py-pysaml2/Makefile b/security/py-pysaml2/Makefile index c81f2818297..71e221bf330 100644 --- a/security/py-pysaml2/Makefile +++ b/security/py-pysaml2/Makefile @@ -1,7 +1,7 @@ # Created by: Brendan Molloy PORTNAME= pysaml2 -PORTVERSION= 7.1.1 +PORTVERSION= 7.1.2 CATEGORIES= security devel python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/security/py-pysaml2/distinfo b/security/py-pysaml2/distinfo index 3c0d6aa5c44..711aecdfb63 100644 --- a/security/py-pysaml2/distinfo +++ b/security/py-pysaml2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058124 -SHA256 (pysaml2-7.1.1.tar.gz) = 8326b041259f71da4be713df8ee6b5406247dac8799ff5e4a514ac5bdded5ac7 -SIZE (pysaml2-7.1.1.tar.gz) = 366647 +TIMESTAMP = 1647264670 +SHA256 (pysaml2-7.1.2.tar.gz) = 1ec94442306511b93fe7a5710f224e05e0aba948682d506614d1e04f3232f827 +SIZE (pysaml2-7.1.2.tar.gz) = 366722 diff --git a/security/py-pysodium/Makefile b/security/py-pysodium/Makefile index 8ff95700cd9..a9473a447f0 100644 --- a/security/py-pysodium/Makefile +++ b/security/py-pysodium/Makefile @@ -2,6 +2,7 @@ PORTNAME= pysodium PORTVERSION= 0.7.11 +PORTREVISION= 1 CATEGORIES= security python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/security/py-pysodium/files/patch-setup.py b/security/py-pysodium/files/patch-setup.py new file mode 100644 index 00000000000..87df1a9bd8e --- /dev/null +++ b/security/py-pysodium/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2022-01-14 21:55:54 UTC ++++ setup.py +@@ -18,7 +18,7 @@ setup( + license="BSD", + 
keywords="cryptography API NaCl libsodium", + url="https://github.com/stef/pysodium", +- packages=find_packages(), ++ packages=find_packages(exclude=['test*']), + long_description=read('README.md'), + requires=["libsodium"], + classifiers=["Development Status :: 4 - Beta", diff --git a/security/py-python-openid/files/patch-2to3 b/security/py-python-openid/files/patch-2to3 new file mode 100644 index 00000000000..559e937090c --- /dev/null +++ b/security/py-python-openid/files/patch-2to3 @@ -0,0 +1,997 @@ +--- openid/consumer/consumer.py.orig 2010-06-18 17:08:20 UTC ++++ openid/consumer/consumer.py +@@ -189,7 +189,7 @@ USING THIS LIBRARY + + import cgi + import copy +-from urlparse import urlparse, urldefrag ++from urllib.parse import urlparse, urldefrag + + from openid import fetchers + +@@ -342,7 +342,7 @@ class Consumer(object): + disco = Discovery(self.session, user_url, self.session_key_prefix) + try: + service = disco.getNextService(self._discover) +- except fetchers.HTTPFetchingError, why: ++ except fetchers.HTTPFetchingError as why: + raise DiscoveryFailure( + 'Error fetching XRDS document: %s' % (why[0],), None) + +@@ -378,7 +378,7 @@ class Consumer(object): + + try: + auth_req.setAnonymous(anonymous) +- except ValueError, why: ++ except ValueError as why: + raise ProtocolError(str(why)) + + return auth_req +@@ -639,12 +639,12 @@ class GenericConsumer(object): + def _complete_id_res(self, message, endpoint, return_to): + try: + self._checkSetupNeeded(message) +- except SetupNeededError, why: ++ except SetupNeededError as why: + return SetupNeededResponse(endpoint, why.user_setup_url) + else: + try: + return self._doIdRes(message, endpoint, return_to) +- except (ProtocolError, DiscoveryFailure), why: ++ except (ProtocolError, DiscoveryFailure) as why: + return FailureResponse(endpoint, why[0]) + + def _completeInvalid(self, message, endpoint, _): +@@ -661,7 +661,7 @@ class GenericConsumer(object): + # message. 
+ try: + self._verifyReturnToArgs(message.toPostArgs()) +- except ProtocolError, why: ++ except ProtocolError as why: + oidutil.log("Verifying return_to arguments: %s" % (why[0],)) + return False + +@@ -768,7 +768,7 @@ class GenericConsumer(object): + + try: + timestamp, salt = splitNonce(nonce) +- except ValueError, why: ++ except ValueError as why: + raise ProtocolError('Malformed nonce: %s' % (why[0],)) + + if (self.store is not None and +@@ -867,7 +867,7 @@ class GenericConsumer(object): + # Make sure all non-OpenID arguments in the response are also + # in the signed return_to. + bare_args = message.getArgs(BARE_NS) +- for pair in bare_args.iteritems(): ++ for pair in bare_args.items(): + if pair not in parsed_args: + raise ProtocolError("Parameter %s not in return_to URL" % (pair[0],)) + +@@ -930,7 +930,7 @@ class GenericConsumer(object): + # case. + try: + self._verifyDiscoverySingle(endpoint, to_match) +- except ProtocolError, e: ++ except ProtocolError as e: + oidutil.log( + "Error attempting to use stored discovery information: " + + str(e)) +@@ -975,7 +975,7 @@ class GenericConsumer(object): + self._verifyDiscoverySingle(endpoint, to_match) + except TypeURIMismatch: + self._verifyDiscoverySingle(endpoint, to_match_1_0) +- except ProtocolError, e: ++ except ProtocolError as e: + oidutil.log("Error attempting to use stored discovery information: " + + str(e)) + oidutil.log("Attempting discovery to verify endpoint") +@@ -1068,7 +1068,7 @@ class GenericConsumer(object): + try: + self._verifyDiscoverySingle( + endpoint, to_match_endpoint) +- except ProtocolError, why: ++ except ProtocolError as why: + failure_messages.append(str(why)) + else: + # It matches, so discover verification has +@@ -1096,7 +1096,7 @@ class GenericConsumer(object): + return False + try: + response = self._makeKVPost(request, server_url) +- except (fetchers.HTTPFetchingError, ServerError), e: ++ except (fetchers.HTTPFetchingError, ServerError) as e: + oidutil.log('check_authentication 
failed: %s' % (e[0],)) + return False + else: +@@ -1178,7 +1178,7 @@ class GenericConsumer(object): + try: + assoc = self._requestAssociation( + endpoint, assoc_type, session_type) +- except ServerError, why: ++ except ServerError as why: + supportedTypes = self._extractSupportedAssociationType(why, + endpoint, + assoc_type) +@@ -1190,7 +1190,7 @@ class GenericConsumer(object): + try: + assoc = self._requestAssociation( + endpoint, assoc_type, session_type) +- except ServerError, why: ++ except ServerError as why: + # Do not keep trying, since it rejected the + # association type that it told us to use. + oidutil.log('Server %s refused its suggested association ' +@@ -1260,17 +1260,17 @@ class GenericConsumer(object): + + try: + response = self._makeKVPost(args, endpoint.server_url) +- except fetchers.HTTPFetchingError, why: ++ except fetchers.HTTPFetchingError as why: + oidutil.log('openid.associate request failed: %s' % (why[0],)) + return None + + try: + assoc = self._extractAssociation(response, assoc_session) +- except KeyError, why: ++ except KeyError as why: + oidutil.log('Missing required parameter in response from %s: %s' + % (endpoint.server_url, why[0])) + return None +- except ProtocolError, why: ++ except ProtocolError as why: + oidutil.log('Protocol error parsing response from %s: %s' % ( + endpoint.server_url, why[0])) + return None +@@ -1393,7 +1393,7 @@ class GenericConsumer(object): + OPENID_NS, 'expires_in', no_default) + try: + expires_in = int(expires_in_str) +- except ValueError, why: ++ except ValueError as why: + raise ProtocolError('Invalid expires_in field: %s' % (why[0],)) + + # OpenID 1 has funny association session behaviour. +@@ -1431,7 +1431,7 @@ class GenericConsumer(object): + # type. 
+ try: + secret = assoc_session.extractSecret(assoc_response) +- except ValueError, why: ++ except ValueError as why: + fmt = 'Malformed response for %s session: %s' + raise ProtocolError(fmt % (assoc_session.session_type, why[0])) + +@@ -1777,7 +1777,7 @@ class SuccessResponse(Response): + """ + msg_args = self.message.getArgs(ns_uri) + +- for key in msg_args.iterkeys(): ++ for key in msg_args.keys(): + if not self.isSigned(ns_uri, key): + oidutil.log("SuccessResponse.getSignedNS: (%s, %s) not signed." + % (ns_uri, key)) +--- openid/consumer/discover.py.orig 2010-06-18 17:08:20 UTC ++++ openid/consumer/discover.py +@@ -13,7 +13,7 @@ __all__ = [ + 'discover', + ] + +-import urlparse ++import urllib.parse + + from openid import oidutil, fetchers, urinorm + +@@ -90,7 +90,7 @@ class OpenIDServiceEndpoint(object): + if self.claimed_id is None: + return None + else: +- return urlparse.urldefrag(self.claimed_id)[0] ++ return urllib.parse.urldefrag(self.claimed_id)[0] + + def compatibilityMode(self): + return self.preferredNamespace() != OPENID_2_0_MESSAGE_NS +@@ -304,10 +304,10 @@ def normalizeURL(url): + DiscoveryFailure""" + try: + normalized = urinorm.urinorm(url) +- except ValueError, why: ++ except ValueError as why: + raise DiscoveryFailure('Normalizing identifier: %s' % (why[0],), None) + else: +- return urlparse.urldefrag(normalized)[0] ++ return urllib.parse.urldefrag(normalized)[0] + + def normalizeXRI(xri): + """Normalize an XRI, stripping its scheme if present""" +@@ -324,7 +324,7 @@ def arrangeByType(service_list, preferred_types): + that element. 
+ + For Python 2.2 compatibility""" +- return zip(range(len(elts)), elts) ++ return list(zip(list(range(len(elts))), elts)) + + def bestMatchingService(service): + """Return the index of the first matching type, or something +@@ -451,7 +451,7 @@ def discoverNoYadis(uri): + return claimed_id, openid_services + + def discoverURI(uri): +- parsed = urlparse.urlparse(uri) ++ parsed = urllib.parse.urlparse(uri) + if parsed[0] and parsed[1]: + if parsed[0] not in ['http', 'https']: + raise DiscoveryFailure('URI scheme is not HTTP or HTTPS', None) +--- openid/dh.py.orig 2010-06-18 17:08:20 UTC ++++ openid/dh.py +@@ -5,11 +5,11 @@ def strxor(x, y): + if len(x) != len(y): + raise ValueError('Inputs to strxor must have the same length') + +- xor = lambda (a, b): chr(ord(a) ^ ord(b)) +- return "".join(map(xor, zip(x, y))) ++ xor = lambda a_b: chr(ord(a_b[0]) ^ ord(a_b[1])) ++ return "".join(map(xor, list(zip(x, y)))) + + class DiffieHellman(object): +- DEFAULT_MOD = 155172898181473697471232257763715539915724801966915404479707795314057629378541917580651227423698188993727816152646631438561595825688188889951272158842675419950341258706556549803580104870537681476726513255747040765857479291291572334510643245094715007229621094194349783925984760375594985848253359305585439638443L ++ DEFAULT_MOD = 155172898181473697471232257763715539915724801966915404479707795314057629378541917580651227423698188993727816152646631438561595825688188889951272158842675419950341258706556549803580104870537681476726513255747040765857479291291572334510643245094715007229621094194349783925984760375594985848253359305585439638443 + + DEFAULT_GEN = 2 + +@@ -19,8 +19,8 @@ class DiffieHellman(object): + fromDefaults = classmethod(fromDefaults) + + def __init__(self, modulus, generator): +- self.modulus = long(modulus) +- self.generator = long(generator) ++ self.modulus = int(modulus) ++ self.generator = int(generator) + + self._setPrivate(cryptutil.randrange(1, modulus - 1)) + +--- openid/extensions/ax.py.orig 
2010-06-18 17:08:20 UTC ++++ openid/extensions/ax.py +@@ -229,7 +229,7 @@ class FetchRequest(AXMessage): + + ax_args = self._newArgs() + +- for type_uri, attribute in self.requested_attributes.iteritems(): ++ for type_uri, attribute in self.requested_attributes.items(): + if attribute.alias is None: + alias = aliases.add(type_uri) + else: +@@ -275,7 +275,7 @@ class FetchRequest(AXMessage): + @rtype: [str] + """ + required = [] +- for type_uri, attribute in self.requested_attributes.iteritems(): ++ for type_uri, attribute in self.requested_attributes.items(): + if attribute.required: + required.append(type_uri) + +@@ -304,7 +304,7 @@ class FetchRequest(AXMessage): + self = cls() + try: + self.parseExtensionArgs(ax_args) +- except NotAXMessage, err: ++ except NotAXMessage as err: + return None + + if self.update_url: +@@ -349,7 +349,7 @@ class FetchRequest(AXMessage): + + aliases = NamespaceMap() + +- for key, value in ax_args.iteritems(): ++ for key, value in ax_args.items(): + if key.startswith('type.'): + alias = key[5:] + type_uri = value +@@ -392,7 +392,7 @@ class FetchRequest(AXMessage): + """Iterate over the AttrInfo objects that are + contained in this fetch_request. + """ +- return self.requested_attributes.itervalues() ++ return iter(self.requested_attributes.values()) + + def __iter__(self): + """Iterate over the attribute type URIs in this fetch_request +@@ -467,7 +467,7 @@ class AXKeyValueMessage(AXMessage): + + ax_args = {} + +- for type_uri, values in self.data.iteritems(): ++ for type_uri, values in self.data.items(): + alias = aliases.add(type_uri) + + ax_args['type.' 
+ alias] = type_uri +@@ -499,20 +499,20 @@ class AXKeyValueMessage(AXMessage): + + aliases = NamespaceMap() + +- for key, value in ax_args.iteritems(): ++ for key, value in ax_args.items(): + if key.startswith('type.'): + type_uri = value + alias = key[5:] + checkAlias(alias) + aliases.addAlias(type_uri, alias) + +- for type_uri, alias in aliases.iteritems(): ++ for type_uri, alias in aliases.items(): + try: + count_s = ax_args['count.' + alias] + except KeyError: + value = ax_args['value.' + alias] + +- if value == u'': ++ if value == '': + values = [] + else: + values = [value] +@@ -709,7 +709,7 @@ class FetchResponse(AXKeyValueMessage): + + try: + self.parseExtensionArgs(ax_args) +- except NotAXMessage, err: ++ except NotAXMessage as err: + return None + else: + return self +--- openid/extensions/sreg.py.orig 2010-06-18 17:08:20 UTC ++++ openid/extensions/sreg.py +@@ -41,10 +41,10 @@ from openid.extension import Extension + from openid import oidutil + + try: +- basestring #pylint:disable-msg=W0104 ++ str #pylint:disable-msg=W0104 + except NameError: + # For Python 2.2 +- basestring = (str, unicode) #pylint:disable-msg=W0622 ++ str = (str, str) #pylint:disable-msg=W0622 + + __all__ = [ + 'SRegRequest', +@@ -94,7 +94,7 @@ ns_uri = ns_uri_1_1 + + try: + registerNamespaceAlias(ns_uri_1_1, 'sreg') +-except NamespaceAliasRegistrationError, e: ++except NamespaceAliasRegistrationError as e: + oidutil.log('registerNamespaceAlias(%r, %r) failed: %s' % (ns_uri_1_1, + 'sreg', str(e),)) + +@@ -156,7 +156,7 @@ def getSRegNS(message): + sreg_ns_uri = ns_uri_1_1 + try: + message.namespaces.addAlias(ns_uri_1_1, 'sreg') +- except KeyError, why: ++ except KeyError as why: + # An alias for the string 'sreg' already exists, but it's + # defined for something other than simple registration + raise SRegNamespaceError(why[0]) +@@ -343,7 +343,7 @@ class SRegRequest(Extension): + registration field or strict is set and a field was + requested more than once + """ +- if 
isinstance(field_names, basestring): ++ if isinstance(field_names, str): + raise TypeError('Fields should be passed as a list of ' + 'strings (not %r)' % (type(field_names),)) + +@@ -489,16 +489,16 @@ class SRegResponse(Extension): + def items(self): + """All of the data values in this simple registration response + """ +- return self.data.items() ++ return list(self.data.items()) + + def iteritems(self): +- return self.data.iteritems() ++ return iter(self.data.items()) + + def keys(self): +- return self.data.keys() ++ return list(self.data.keys()) + + def iterkeys(self): +- return self.data.iterkeys() ++ return iter(self.data.keys()) + + def has_key(self, key): + return key in self +@@ -514,5 +514,5 @@ class SRegResponse(Extension): + checkFieldName(field_name) + return self.data[field_name] + +- def __nonzero__(self): ++ def __bool__(self): + return bool(self.data) +--- openid/fetchers.py.orig 2010-06-18 17:08:20 UTC ++++ openid/fetchers.py +@@ -7,9 +7,9 @@ __all__ = ['fetch', 'getDefaultFetcher', 'setDefaultFe + 'HTTPFetcher', 'createHTTPFetcher', 'HTTPFetchingError', + 'HTTPError'] + +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + import time +-import cStringIO ++import io + import sys + + import openid +@@ -186,7 +186,7 @@ class Urllib2Fetcher(HTTPFetcher): + + # Parameterized for the benefit of testing frameworks, see + # http://trac.openidenabled.com/trac/ticket/85 +- urlopen = staticmethod(urllib2.urlopen) ++ urlopen = staticmethod(urllib.request.urlopen) + + def fetch(self, url, body=None, headers=None): + if not _allowedURL(url): +@@ -199,14 +199,14 @@ class Urllib2Fetcher(HTTPFetcher): + 'User-Agent', + "%s Python-urllib/%s" % (USER_AGENT, urllib2.__version__,)) + +- req = urllib2.Request(url, data=body, headers=headers) ++ req = urllib.request.Request(url, data=body, headers=headers) + try: + f = self.urlopen(req) + try: + return self._makeResponse(f) + finally: + f.close() +- except urllib2.HTTPError, why: ++ except 
urllib.error.HTTPError as why: + try: + return self._makeResponse(why) + finally: +@@ -216,7 +216,7 @@ class Urllib2Fetcher(HTTPFetcher): + resp = HTTPResponse() + resp.body = urllib2_response.read(MAX_RESPONSE_KB * 1024) + resp.final_url = urllib2_response.geturl() +- resp.headers = dict(urllib2_response.info().items()) ++ resp.headers = dict(list(urllib2_response.info().items())) + + if hasattr(urllib2_response, 'code'): + resp.status = urllib2_response.code +@@ -294,7 +294,7 @@ class CurlHTTPFetcher(HTTPFetcher): + + header_list = [] + if headers is not None: +- for header_name, header_value in headers.iteritems(): ++ for header_name, header_value in headers.items(): + header_list.append('%s: %s' % (header_name, header_value)) + + c = pycurl.Curl() +@@ -313,14 +313,14 @@ class CurlHTTPFetcher(HTTPFetcher): + if not self._checkURL(url): + raise HTTPError("Fetching URL not allowed: %r" % (url,)) + +- data = cStringIO.StringIO() ++ data = io.StringIO() + def write_data(chunk): + if data.tell() > 1024*MAX_RESPONSE_KB: + return 0 + else: + return data.write(chunk) + +- response_header_data = cStringIO.StringIO() ++ response_header_data = io.StringIO() + c.setopt(pycurl.WRITEFUNCTION, write_data) + c.setopt(pycurl.HEADERFUNCTION, response_header_data.write) + c.setopt(pycurl.TIMEOUT, off) +@@ -422,6 +422,6 @@ class HTTPLib2Fetcher(HTTPFetcher): + return HTTPResponse( + body=content, + final_url=final_url, +- headers=dict(httplib2_response.items()), ++ headers=dict(list(httplib2_response.items())), + status=httplib2_response.status, + ) +--- openid/message.py.orig 2010-06-18 17:08:20 UTC ++++ openid/message.py +@@ -6,7 +6,7 @@ __all__ = ['Message', 'NamespaceMap', 'no_default', 'r + + import copy + import warnings +-import urllib ++import urllib.request, urllib.parse, urllib.error + + from openid import oidutil + from openid import kvform +@@ -100,13 +100,11 @@ def registerNamespaceAlias(namespace_uri, alias): + if registered_aliases.get(alias) == namespace_uri: + 
return + +- if namespace_uri in registered_aliases.values(): +- raise NamespaceAliasRegistrationError, \ +- 'Namespace uri %r already registered' % (namespace_uri,) ++ if namespace_uri in list(registered_aliases.values()): ++ raise NamespaceAliasRegistrationError('Namespace uri %r already registered' % (namespace_uri,)) + + if alias in registered_aliases: +- raise NamespaceAliasRegistrationError, \ +- 'Alias %r already registered' % (alias,) ++ raise NamespaceAliasRegistrationError('Alias %r already registered' % (alias,)) + + registered_aliases[alias] = namespace_uri + +@@ -148,7 +146,7 @@ class Message(object): + + # Partition into "openid." args and bare args + openid_args = {} +- for key, value in args.items(): ++ for key, value in list(args.items()): + if isinstance(value, list): + raise TypeError("query dict must have one value for each key, " + "not lists of values. Query is %r" % (args,)) +@@ -186,7 +184,7 @@ class Message(object): + ns_args = [] + + # Resolve namespaces +- for rest, value in openid_args.iteritems(): ++ for rest, value in openid_args.items(): + try: + ns_alias, ns_key = rest.split('.', 1) + except ValueError: +@@ -266,7 +264,7 @@ class Message(object): + args = {} + + # Add namespace definitions to the output +- for ns_uri, alias in self.namespaces.iteritems(): ++ for ns_uri, alias in self.namespaces.items(): + if self.namespaces.isImplicit(ns_uri): + continue + if alias == NULL_NAMESPACE: +@@ -275,7 +273,7 @@ class Message(object): + ns_key = 'openid.ns.' 
+ alias + args[ns_key] = ns_uri + +- for (ns_uri, ns_key), value in self.args.iteritems(): ++ for (ns_uri, ns_key), value in self.args.items(): + key = self.getKey(ns_uri, ns_key) + args[key] = value.encode('UTF-8') + +@@ -287,7 +285,7 @@ class Message(object): + # FIXME - undocumented exception + post_args = self.toPostArgs() + kvargs = {} +- for k, v in post_args.iteritems(): ++ for k, v in post_args.items(): + if not k.startswith('openid.'): + raise ValueError( + 'This message can only be encoded as a POST, because it ' +@@ -327,7 +325,7 @@ class Message(object): + form = ElementTree.Element('form') + + if form_tag_attrs: +- for name, attr in form_tag_attrs.iteritems(): ++ for name, attr in form_tag_attrs.items(): + form.attrib[name] = attr + + form.attrib['action'] = action_url +@@ -335,7 +333,7 @@ class Message(object): + form.attrib['accept-charset'] = 'UTF-8' + form.attrib['enctype'] = 'application/x-www-form-urlencoded' + +- for name, value in self.toPostArgs().iteritems(): ++ for name, value in self.toPostArgs().items(): + attrs = {'type': 'hidden', + 'name': name, + 'value': value} +@@ -361,9 +359,9 @@ class Message(object): + + def toURLEncoded(self): + """Generate an x-www-urlencoded string""" +- args = self.toPostArgs().items() ++ args = list(self.toPostArgs().items()) + args.sort() +- return urllib.urlencode(args) ++ return urllib.parse.urlencode(args) + + def _fixNS(self, namespace): + """Convert an input value into the internally used values of +@@ -378,7 +376,7 @@ class Message(object): + else: + namespace = self._openid_ns_uri + +- if namespace != BARE_NS and type(namespace) not in [str, unicode]: ++ if namespace != BARE_NS and type(namespace) not in [str, str]: + raise TypeError( + "Namespace must be BARE_NS, OPENID_NS or a string. 
got %r" + % (namespace,)) +@@ -456,7 +454,7 @@ class Message(object): + return dict([ + (ns_key, value) + for ((pair_ns, ns_key), value) +- in self.args.iteritems() ++ in self.args.items() + if pair_ns == namespace + ]) + +@@ -467,7 +465,7 @@ class Message(object): + @type updates: {unicode:unicode} + """ + namespace = self._fixNS(namespace) +- for k, v in updates.iteritems(): ++ for k, v in updates.items(): + self.setArg(namespace, k, v) + + def setArg(self, namespace, key, value): +@@ -551,7 +549,7 @@ class NamespaceMap(object): + + @returns: iterator of (namespace_uri, alias) + """ +- return self.namespace_to_alias.iteritems() ++ return iter(self.namespace_to_alias.items()) + + def addAlias(self, namespace_uri, desired_alias, implicit=False): + """Add an alias from this namespace URI to the desired alias +@@ -563,7 +561,7 @@ class NamespaceMap(object): + + # Check that desired_alias does not contain a period as per + # the spec. +- if type(desired_alias) in [str, unicode]: ++ if type(desired_alias) in [str, str]: + assert '.' 
not in desired_alias, \ + "%r must not contain a dot" % (desired_alias,) + +@@ -592,7 +590,7 @@ class NamespaceMap(object): + raise KeyError(fmt % (namespace_uri, desired_alias, alias)) + + assert (desired_alias == NULL_NAMESPACE or +- type(desired_alias) in [str, unicode]), repr(desired_alias) ++ type(desired_alias) in [str, str]), repr(desired_alias) + assert namespace_uri not in self.implicit_namespaces + self.alias_to_namespace[desired_alias] = namespace_uri + self.namespace_to_alias[namespace_uri] = desired_alias +--- openid/oidutil.py.orig 2010-06-18 17:08:20 UTC ++++ openid/oidutil.py +@@ -9,9 +9,9 @@ __all__ = ['log', 'appendArgs', 'toBase64', 'fromBase6 + + import binascii + import sys +-import urlparse ++import urllib.parse + +-from urllib import urlencode ++from urllib.parse import urlencode + + elementtree_modules = [ + 'lxml.etree', +@@ -129,7 +129,7 @@ def appendArgs(url, args): + @rtype: str + """ + if hasattr(args, 'items'): +- args = args.items() ++ args = list(args.items()) + args.sort() + else: + args = list(args) +@@ -164,7 +164,7 @@ def toBase64(s): + def fromBase64(s): + try: + return binascii.a2b_base64(s) +- except binascii.Error, why: ++ except binascii.Error as why: + # Convert to a common exception type + raise ValueError(why[0]) + +--- openid/server/server.py.orig 2010-06-18 17:08:20 UTC ++++ openid/server/server.py +@@ -438,7 +438,7 @@ class AssociateRequest(OpenIDRequest): + + try: + session = session_class.fromMessage(message) +- except ValueError, why: ++ except ValueError as why: + raise ProtocolError(message, 'Error parsing %s session: %s' % + (session_class.session_type, why[0])) + +@@ -1177,7 +1177,7 @@ class Signatory(object): + + try: + valid = assoc.checkMessageSignature(message) +- except ValueError, ex: ++ except ValueError as ex: + oidutil.log("Error in verifying %s with %s: %s" % (message, + assoc, + ex)) +@@ -1225,7 +1225,7 @@ class Signatory(object): + + try: + signed_response.fields = 
assoc.signMessage(signed_response.fields) +- except kvform.KVFormError, err: ++ except kvform.KVFormError as err: + raise EncodingError(response, explanation=str(err)) + return signed_response + +@@ -1425,7 +1425,7 @@ class Decoder(object): + + try: + message = Message.fromPostArgs(query) +- except InvalidOpenIDNamespace, err: ++ except InvalidOpenIDNamespace as err: + # It's useful to have a Message attached to a ProtocolError, so we + # override the bad ns value to build a Message out of it. Kinda + # kludgy, since it's made of lies, but the parts that aren't lies +@@ -1647,7 +1647,7 @@ class ProtocolError(Exception): + self.openid_message = message + self.reference = reference + self.contact = contact +- assert type(message) not in [str, unicode] ++ assert type(message) not in [str, str] + Exception.__init__(self, text) + + +--- openid/server/trustroot.py.orig 2010-06-18 17:08:20 UTC ++++ openid/server/trustroot.py +@@ -21,7 +21,7 @@ from openid import oidutil + from openid import urinorm + from openid.yadis import services + +-from urlparse import urlparse, urlunparse ++from urllib.parse import urlparse, urlunparse + import re + + ############################################ +@@ -442,7 +442,7 @@ def verifyReturnTo(realm_str, return_to, _vrfy=getAllo + + try: + allowable_urls = _vrfy(realm.buildDiscoveryURL()) +- except RealmVerificationRedirected, err: ++ except RealmVerificationRedirected as err: + oidutil.log(str(err)) + return False + +--- openid/store/filestore.py.orig 2010-06-18 17:08:20 UTC ++++ openid/store/filestore.py +@@ -24,8 +24,8 @@ except ImportError: + for _ in range(5): + name = os.tempnam(dir) + try: +- fd = os.open(name, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0600) +- except OSError, why: ++ fd = os.open(name, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0o600) ++ except OSError as why: + if why.errno != EEXIST: + raise + else: +@@ -82,7 +82,7 @@ def _removeIfPresent(filename): + """ + try: + os.unlink(filename) +- except OSError, why: ++ except OSError 
as why: + if why.errno == ENOENT: + # Someone beat us to it, but it's gone, so that's OK + return 0 +@@ -102,7 +102,7 @@ def _ensureDir(dir_name): + """ + try: + os.makedirs(dir_name) +- except OSError, why: ++ except OSError as why: + if why.errno != EEXIST or not os.path.isdir(dir_name): + raise + +@@ -220,7 +220,7 @@ class FileOpenIDStore(OpenIDStore): + + try: + os.rename(tmp, filename) +- except OSError, why: ++ except OSError as why: + if why.errno != EEXIST: + raise + +@@ -229,7 +229,7 @@ class FileOpenIDStore(OpenIDStore): + # file, but not in putting the temporary file in place. + try: + os.unlink(filename) +- except OSError, why: ++ except OSError as why: + if why.errno == ENOENT: + pass + else: +@@ -289,7 +289,7 @@ class FileOpenIDStore(OpenIDStore): + def _getAssociation(self, filename): + try: + assoc_file = file(filename, 'rb') +- except IOError, why: ++ except IOError as why: + if why.errno == ENOENT: + # No association exists for that URL and handle + return None +@@ -350,8 +350,8 @@ class FileOpenIDStore(OpenIDStore): + + filename = os.path.join(self.nonce_dir, filename) + try: +- fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0200) +- except OSError, why: ++ fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o200) ++ except OSError as why: + if why.errno == EEXIST: + return False + else: +@@ -363,13 +363,11 @@ class FileOpenIDStore(OpenIDStore): + def _allAssocs(self): + all_associations = [] + +- association_filenames = map( +- lambda filename: os.path.join(self.association_dir, filename), +- os.listdir(self.association_dir)) ++ association_filenames = [os.path.join(self.association_dir, filename) for filename in os.listdir(self.association_dir)] + for association_filename in association_filenames: + try: + association_file = file(association_filename, 'rb') +- except IOError, why: ++ except IOError as why: + if why.errno == ENOENT: + oidutil.log("%s disappeared during %s._allAssocs" % ( + association_filename, 
self.__class__.__name__)) +--- openid/store/sqlstore.py.orig 2010-06-18 17:08:20 UTC ++++ openid/store/sqlstore.py +@@ -139,11 +139,11 @@ class SQLStore(OpenIDStore): + # Currently the strings in our tables just have ascii in them, + # so this ought to be safe. + def unicode_to_str(arg): +- if isinstance(arg, unicode): ++ if isinstance(arg, str): + return str(arg) + else: + return arg +- str_args = map(unicode_to_str, args) ++ str_args = list(map(unicode_to_str, args)) + self.cur.execute(sql, str_args) + + def __getattr__(self, attr): +@@ -349,7 +349,7 @@ class SQLiteStore(SQLStore): + # message from the OperationalError. + try: + return super(SQLiteStore, self).useNonce(*args, **kwargs) +- except self.exceptions.OperationalError, why: ++ except self.exceptions.OperationalError as why: + if re.match('^columns .* are not unique$', why[0]): + return False + else: +--- openid/urinorm.py.orig 2010-06-18 17:08:20 UTC ++++ openid/urinorm.py +@@ -22,7 +22,7 @@ pct_encoded_pattern = r'%([0-9A-Fa-f]{2})' + pct_encoded_re = re.compile(pct_encoded_pattern) + + try: +- unichr(0x10000) ++ chr(0x10000) + except ValueError: + # narrow python build + UCSCHAR = [ +@@ -73,8 +73,7 @@ _unreserved[ord('~')] = True + + + _escapeme_re = re.compile('[%s]' % (''.join( +- map(lambda (m, n): u'%s-%s' % (unichr(m), unichr(n)), +- UCSCHAR + IPRIVATE)),)) ++ ['%s-%s' % (chr(m_n[0]), chr(m_n[1])) for m_n in UCSCHAR + IPRIVATE]),)) + + + def _pct_escape_unicode(char_match): +@@ -137,7 +136,7 @@ def remove_dot_segments(path): + + + def urinorm(uri): +- if isinstance(uri, unicode): ++ if isinstance(uri, str): + uri = _escapeme_re.sub(_pct_escape_unicode, uri).encode('ascii') + + illegal_mo = uri_illegal_char_re.search(uri) +@@ -171,7 +170,7 @@ def urinorm(uri): + if '%' in host: + host = host.lower() + host = pct_encoded_re.sub(_pct_encoded_replace, host) +- host = unicode(host, 'utf-8').encode('idna') ++ host = str(host, 'utf-8').encode('idna') + else: + host = host.lower() + +--- 
openid/yadis/etxrd.py.orig 2010-06-18 17:08:20 UTC ++++ openid/yadis/etxrd.py +@@ -67,7 +67,7 @@ def parseXRDS(text): + """ + try: + element = ElementTree.XML(text) +- except XMLError, why: ++ except XMLError as why: + exc = XRDSError('Error parsing document as XML') + exc.reason = why + raise exc +--- openid/yadis/parsehtml.py.orig 2010-06-18 17:08:20 UTC ++++ openid/yadis/parsehtml.py +@@ -1,7 +1,7 @@ + __all__ = ['findHTMLMeta', 'MetaNotFound'] + +-from HTMLParser import HTMLParser, HTMLParseError +-import htmlentitydefs ++from html.parser import HTMLParser, HTMLParseError ++import html.entities + import re + + from openid.yadis.constants import YADIS_HEADER_NAME +@@ -39,12 +39,12 @@ def substituteMO(mo): + codepoint = int(mo.group('dec')) + else: + assert mo.lastgroup == 'word' +- codepoint = htmlentitydefs.name2codepoint.get(mo.group('word')) ++ codepoint = html.entities.name2codepoint.get(mo.group('word')) + + if codepoint is None: + return mo.group() + else: +- return unichr(codepoint) ++ return chr(codepoint) + + def substituteEntities(s): + return ent_re.sub(substituteMO, s) +@@ -180,11 +180,11 @@ def findHTMLMeta(stream): + chunks.append(chunk) + try: + parser.feed(chunk) +- except HTMLParseError, why: ++ except HTMLParseError as why: + # HTML parse error, so bail + chunks.append(stream.read()) + break +- except ParseDone, why: ++ except ParseDone as why: + uri = why[0] + if uri is None: + # Parse finished, but we may need the rest of the file +--- openid/yadis/services.py.orig 2010-06-18 17:08:20 UTC ++++ openid/yadis/services.py +@@ -26,7 +26,7 @@ def getServiceEndpoints(input_url, flt=None): + try: + endpoints = applyFilter(result.normalized_uri, + result.response_text, flt) +- except XRDSError, err: ++ except XRDSError as err: + raise DiscoveryFailure(str(err), None) + return (result.normalized_uri, endpoints) + +--- openid/yadis/xri.py.orig 2010-06-18 17:08:20 UTC ++++ openid/yadis/xri.py +@@ -5,11 +5,12 @@ + """ + + import re ++from functools import 
reduce + + XRI_AUTHORITIES = ['!', '=', '@', '+', '$', '('] + + try: +- unichr(0x10000) ++ chr(0x10000) + except ValueError: + # narrow python build + UCSCHAR = [ +@@ -50,8 +51,7 @@ else: + + + _escapeme_re = re.compile('[%s]' % (''.join( +- map(lambda (m, n): u'%s-%s' % (unichr(m), unichr(n)), +- UCSCHAR + IPRIVATE)),)) ++ ['%s-%s' % (chr(m_n[0]), chr(m_n[1])) for m_n in UCSCHAR + IPRIVATE]),)) + + + def identifierScheme(identifier): +@@ -147,7 +147,7 @@ def rootAuthority(xri): + # IRI reference. XXX: Can IRI authorities have segments? + segments = authority.split('!') + segments = reduce(list.__add__, +- map(lambda s: s.split('*'), segments)) ++ [s.split('*') for s in segments]) + root = segments[0] + + return XRI(root) diff --git a/security/py-tlslite-ng/Makefile b/security/py-tlslite-ng/Makefile new file mode 100644 index 00000000000..3c45f185186 --- /dev/null +++ b/security/py-tlslite-ng/Makefile @@ -0,0 +1,23 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= tlslite-ng +PORTVERSION= 0.7.6 +CATEGORIES= security python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Pure python implementation of SSL and TLS + +LICENSE= BSD3CLAUSE LGPL21 PD +LICENSE_COMB= multi +LICENSE_FILE= ${WRKSRC}/LICENSE + +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}ecdsa>=0:security/py-ecdsa@${PY_FLAVOR} + +USES= python:3.3+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +.include diff --git a/security/py-tlslite-ng/distinfo b/security/py-tlslite-ng/distinfo new file mode 100644 index 00000000000..7fa2cbfc1b5 --- /dev/null +++ b/security/py-tlslite-ng/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1647264412 +SHA256 (tlslite-ng-0.7.6.tar.gz) = 6ab56f0e9629ce3d807eb528c9112defa9f2e00af2b2961254e8429ca5c1ff00 +SIZE (tlslite-ng-0.7.6.tar.gz) = 479945 diff --git a/security/py-tlslite-ng/pkg-descr b/security/py-tlslite-ng/pkg-descr new file mode 100644 index 00000000000..a162566dbd4 --- /dev/null +++ b/security/py-tlslite-ng/pkg-descr 
@@ -0,0 +1,7 @@ +tlslite-ng is an open source python library that implements SSL and TLS +cryptographic protocols. It can be used either as a standalone wrapper around +python socket interface or as a backend for multiple other libraries. tlslite-ng +is pure python, however it can use other libraries for faster crypto operations. +tlslite-ng integrates with several stdlib neworking libraries. + +WWW: https://github.com/tlsfuzzer/tlslite-ng diff --git a/security/py-tlslite/files/patch-async b/security/py-tlslite/files/patch-async new file mode 100644 index 00000000000..6a472d67dfa --- /dev/null +++ b/security/py-tlslite/files/patch-async @@ -0,0 +1,89 @@ +--- tlslite/tlsconnection.py.orig 2015-08-04 00:11:09 UTC ++++ tlslite/tlsconnection.py +@@ -68,7 +68,7 @@ class TLSConnection(TLSRecordLayer): + + def handshakeClientAnonymous(self, session=None, settings=None, + checker=None, serverName="", +- async=False): ++ asynchronous=False): + """Perform an anonymous handshake in the role of client. + + This function performs an SSL or TLS handshake using an +@@ -102,8 +102,8 @@ class TLSConnection(TLSRecordLayer): + @type serverName: string + @param serverName: The ServerNameIndication TLS Extension. + +- @type async: bool +- @param async: If False, this function will block until the ++ @type asynchronous: bool ++ @param asynchronous: If False, this function will block until the + handshake is completed. If True, this function will return a + generator. 
Successive invocations of the generator will + return 0 if it is waiting to read from the socket, 1 if it is +@@ -126,7 +126,7 @@ class TLSConnection(TLSRecordLayer): + settings=settings, + checker=checker, + serverName=serverName) +- if async: ++ if asynchronous: + return handshaker + for result in handshaker: + pass +@@ -134,7 +134,7 @@ class TLSConnection(TLSRecordLayer): + def handshakeClientSRP(self, username, password, session=None, + settings=None, checker=None, + reqTack=True, serverName="", +- async=False): ++ asynchronous=False): + """Perform an SRP handshake in the role of client. + + This function performs a TLS/SRP handshake. SRP mutually +@@ -179,8 +179,8 @@ class TLSConnection(TLSRecordLayer): + @type serverName: string + @param serverName: The ServerNameIndication TLS Extension. + +- @type async: bool +- @param async: If False, this function will block until the ++ @type asynchronous: bool ++ @param asynchronous: If False, this function will block until the + handshake is completed. If True, this function will return a + generator. Successive invocations of the generator will + return 0 if it is waiting to read from the socket, 1 if it is +@@ -208,7 +208,7 @@ class TLSConnection(TLSRecordLayer): + # + # If 'async' is True, the generator is returned to the caller, + # otherwise it is executed to completion here. +- if async: ++ if asynchronous: + return handshaker + for result in handshaker: + pass +@@ -216,7 +216,7 @@ class TLSConnection(TLSRecordLayer): + def handshakeClientCert(self, certChain=None, privateKey=None, + session=None, settings=None, checker=None, + nextProtos=None, reqTack=True, serverName="", +- async=False): ++ asynchronous=False): + """Perform a certificate-based handshake in the role of client. + + This function performs an SSL or TLS handshake. The server +@@ -273,8 +273,8 @@ class TLSConnection(TLSRecordLayer): + @type serverName: string + @param serverName: The ServerNameIndication TLS Extension. 
+ +- @type async: bool +- @param async: If False, this function will block until the ++ @type asynchronous: bool ++ @param asynchronous: If False, this function will block until the + handshake is completed. If True, this function will return a + generator. Successive invocations of the generator will + return 0 if it is waiting to read from the socket, 1 if it is +@@ -303,7 +303,7 @@ class TLSConnection(TLSRecordLayer): + # + # If 'async' is True, the generator is returned to the caller, + # otherwise it is executed to completion here. +- if async: ++ if asynchronous: + return handshaker + for result in handshaker: + pass diff --git a/security/py-xmlsec/Makefile b/security/py-xmlsec/Makefile index 75879fd4dbc..a6a55ed564a 100644 --- a/security/py-xmlsec/Makefile +++ b/security/py-xmlsec/Makefile @@ -2,6 +2,7 @@ PORTNAME= xmlsec PORTVERSION= 1.3.12 +PORTREVISION= 1 CATEGORIES= security python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/security/py-zope.password/files/patch-2to3 b/security/py-zope.password/files/patch-2to3 new file mode 100644 index 00000000000..d71c52e1ba5 --- /dev/null +++ b/security/py-zope.password/files/patch-2to3 @@ -0,0 +1,205 @@ +--- src/zope/password/tests/test_zpasswd.py.orig 2010-05-27 08:08:11 UTC ++++ src/zope/password/tests/test_zpasswd.py +@@ -19,7 +19,7 @@ $Id: test_zpasswd.py 112138 2010-05-07 15:23:02Z ulif + import os + import sys + import unittest, doctest +-from StringIO import StringIO ++from io import StringIO + + from zope.password import password, zpasswd + +@@ -51,23 +51,23 @@ class ArgumentParsingTestCase(TestBase): + argv = ["foo/bar.py"] + args + options = zpasswd.parse_args(argv) + self.assertEqual(options.program, "bar.py") +- self.assert_(options.version) ++ self.assertTrue(options.version) + return options + + def check_stdout_content(self, args): + try: + options = self.parse_args(args) +- except SystemExit, e: ++ except SystemExit as e: + self.assertEqual(e.code, 0) +- 
self.assert_(self.stdout.getvalue()) +- self.failIf(self.stderr.getvalue()) ++ self.assertTrue(self.stdout.getvalue()) ++ self.assertFalse(self.stderr.getvalue()) + else: + self.fail("expected SystemExit") + + def test_no_arguments(self): + options = self.parse_args([]) +- self.assert_(options.managers) +- self.assert_(not options.destination) ++ self.assertTrue(options.managers) ++ self.assertTrue(not options.destination) + + def test_version_long(self): + self.check_stdout_content(["--version"]) +@@ -88,11 +88,11 @@ class ArgumentParsingTestCase(TestBase): + + def test_config_short(self): + options = self.parse_args(["-c", self.config]) +- self.assert_(options.managers) ++ self.assertTrue(options.managers) + + def test_config_long(self): + options = self.parse_args(["--config", self.config]) +- self.assert_(options.managers) ++ self.assertTrue(options.managers) + + class ControlledInputApplication(zpasswd.Application): + +@@ -123,19 +123,19 @@ class InputCollectionTestCase(TestBase): + + def check_principal(self, expected): + output = self.stdout.getvalue() +- self.failUnless(output) ++ self.assertTrue(output) + + principal_lines = output.splitlines()[-(len(expected) + 1):-1] + for line, expline in zip(principal_lines, expected): +- self.failUnlessEqual(line.strip(), expline) ++ self.assertEqual(line.strip(), expline) + + def test_principal_information(self): + options = self.createOptions() + app = ControlledInputApplication(options, + ["id", "title", "login", "1", "passwd", "passwd", "description"]) + app.process() +- self.failUnless(not self.stderr.getvalue()) +- self.failUnless(app.all_input_consumed()) ++ self.assertTrue(not self.stderr.getvalue()) ++ self.assertTrue(app.all_input_consumed()) + self.check_principal([ + '>destination, principal +- print ++ print(self.title) ++ print(principal, file=destination) ++ print() + + return 0 + +@@ -185,21 +185,21 @@ class Application(object): + while True: + value = self.read_input_line(prompt).strip() + if not 
value and error: +- print >>sys.stderr, error ++ print(error, file=sys.stderr) + continue + return value + + def get_password_manager(self): + default = 0 + self.print_message("Password manager:") +- print ++ print() + managers = self.options.managers + + for i, (name, manager) in enumerate(managers): +- print "% i. %s" % (i + 1, name) ++ print("% i. %s" % (i + 1, name)) + if name == 'SSHA': + default = i +- print ++ print() + self.need_blank_line = True + while True: + password_manager = self.read_input_line( +@@ -212,8 +212,8 @@ class Application(object): + if index > 0 and index <= len(managers): + index -= 1 + break +- print >>sys.stderr, "You must select a password manager" +- print "%s password manager selected" % managers[index][0] ++ print("You must select a password manager", file=sys.stderr) ++ print("%s password manager selected" % managers[index][0]) + return managers[index] + + def get_password(self): +@@ -221,23 +221,23 @@ class Application(object): + while True: + password = self.read_password("Password: ") + if not password: +- print >>sys.stderr, "Password may not be empty" ++ print("Password may not be empty", file=sys.stderr) + continue + if password != password.strip() or password.split() != [password]: +- print >>sys.stderr, "Password may not contain spaces" ++ print("Password may not contain spaces", file=sys.stderr) + continue + break + again = self.read_password("Verify password: ") + if again != password: +- print >>sys.stderr, "Password not verified!" 
++ print("Password not verified!", file=sys.stderr) + sys.exit(1) + return password + + def print_message(self, message): + if self.need_blank_line: +- print ++ print() + self.need_blank_line = False +- print message ++ print(message) + + def get_password_managers(config_path=None): + if not config_path: +@@ -247,7 +247,7 @@ def get_password_managers(config_path=None): + from zope.component import getUtilitiesFor + from zope.password.interfaces import IPasswordManager + +- print "Loading configuration..." ++ print("Loading configuration...") + config = xmlconfig.file(config_path) + managers = [] + for name, manager in getUtilitiesFor(IPasswordManager): diff --git a/security/razorback-scriptNugget/Makefile b/security/razorback-scriptNugget/Makefile index 3f5153a5e51..b5f91e9fd89 100644 --- a/security/razorback-scriptNugget/Makefile +++ b/security/razorback-scriptNugget/Makefile @@ -2,7 +2,7 @@ PORTNAME= scriptNugget PORTVERSION= 0.5.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= security MASTER_SITES= SF/razorbacktm/Nuggets PKGNAMEPREFIX= razorback- diff --git a/security/rubygem-googleauth/Makefile b/security/rubygem-googleauth/Makefile index 1d0daccb235..eb55e31b62a 100644 --- a/security/rubygem-googleauth/Makefile +++ b/security/rubygem-googleauth/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= googleauth -PORTVERSION= 1.1.0 +PORTVERSION= 1.1.2 CATEGORIES= security rubygems MASTER_SITES= RG @@ -11,7 +11,7 @@ COMMENT= Google Auth Library for Ruby LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= rubygem-faraday>=0.17.3<2.0:www/rubygem-faraday \ +RUN_DEPENDS= rubygem-faraday>=0.17.3<3.0:www/rubygem-faraday \ rubygem-jwt>=1.4<3.0:www/rubygem-jwt \ rubygem-memoist>=0.16<1:devel/rubygem-memoist \ rubygem-multi_json>=1.11<2:devel/rubygem-multi_json \ diff --git a/security/rubygem-googleauth/distinfo b/security/rubygem-googleauth/distinfo index f785399412a..2c741db5e8b 100644 --- a/security/rubygem-googleauth/distinfo +++ 
b/security/rubygem-googleauth/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643971208 -SHA256 (rubygem/googleauth-1.1.0.gem) = 7ab0ce3befdf42abf95276fd6885696272cf9ec032fa48549868e32e4f36f58b -SIZE (rubygem/googleauth-1.1.0.gem) = 38400 +TIMESTAMP = 1647264858 +SHA256 (rubygem/googleauth-1.1.2.gem) = c1c1f155405820f3d37b743bb45700b29fed7b17a38414443eca7f172ec2d29f +SIZE (rubygem/googleauth-1.1.2.gem) = 38912 diff --git a/security/rubygem-securerandom/Makefile b/security/rubygem-securerandom/Makefile index ecb69060ed3..37722f6d7d6 100644 --- a/security/rubygem-securerandom/Makefile +++ b/security/rubygem-securerandom/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= securerandom -PORTVERSION= 0.1.1 +PORTVERSION= 0.2.0 CATEGORIES= security rubygems MASTER_SITES= RG diff --git a/security/rubygem-securerandom/distinfo b/security/rubygem-securerandom/distinfo index cb379d9843c..732467f648a 100644 --- a/security/rubygem-securerandom/distinfo +++ b/security/rubygem-securerandom/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1635181723 -SHA256 (rubygem/securerandom-0.1.1.gem) = f53cd5988cd33f51ea4d7fc488f1be071e86b790949d9bccc578a7e757887fa2 -SIZE (rubygem/securerandom-0.1.1.gem) = 9728 +TIMESTAMP = 1647264860 +SHA256 (rubygem/securerandom-0.2.0.gem) = dd0c10b4c62f182ddd62fa94895ec3b693edc1103f4851e32468694f4f9f73db +SIZE (rubygem/securerandom-0.2.0.gem) = 10240 diff --git a/security/spectre-meltdown-checker/Makefile b/security/spectre-meltdown-checker/Makefile index a9751e43e3a..9a3942a71c5 100644 --- a/security/spectre-meltdown-checker/Makefile +++ b/security/spectre-meltdown-checker/Makefile @@ -1,6 +1,6 @@ PORTNAME= spectre-meltdown-checker DISTVERSIONPREFIX= v -DISTVERSION= 0.44 +DISTVERSION= 0.45 CATEGORIES= security MAINTAINER= pkubaj@FreeBSD.org diff --git a/security/spectre-meltdown-checker/distinfo b/security/spectre-meltdown-checker/distinfo index b89ee902eab..dcd4d62a4d1 100644 --- a/security/spectre-meltdown-checker/distinfo +++ 
b/security/spectre-meltdown-checker/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1605109163 -SHA256 (speed47-spectre-meltdown-checker-v0.44_GH0.tar.gz) = 96765d765275476c36a146da123fa7e9eb310a84e84ae71b179c9ace3b6ab0c8 -SIZE (speed47-spectre-meltdown-checker-v0.44_GH0.tar.gz) = 54325 +TIMESTAMP = 1648398652 +SHA256 (speed47-spectre-meltdown-checker-v0.45_GH0.tar.gz) = acc9be079b177be94f428ae790b2c60767afa177e28eef9d21dd09986251e467 +SIZE (speed47-spectre-meltdown-checker-v0.45_GH0.tar.gz) = 64263 diff --git a/security/stoken/Makefile b/security/stoken/Makefile index b0b9d585d0e..54b32b6379a 100644 --- a/security/stoken/Makefile +++ b/security/stoken/Makefile @@ -2,6 +2,7 @@ PORTNAME= stoken DISTVERSION= 0.92 +PORTREVISION= 1 CATEGORIES= security MASTER_SITES= SF/stoken diff --git a/security/xmlsec1/Makefile b/security/xmlsec1/Makefile index 476d28d0f70..0b31bc6f8c9 100644 --- a/security/xmlsec1/Makefile +++ b/security/xmlsec1/Makefile @@ -2,6 +2,7 @@ PORTNAME= xmlsec1 PORTVERSION= 1.2.29 +PORTREVISION= 1 CATEGORIES= security MASTER_SITES= http://www.aleksey.com/xmlsec/download/ \ ftp://ftp.aleksey.com/pub/xmlsec/releases/ \ diff --git a/security/xray-core/Makefile b/security/xray-core/Makefile index 4e7338cecfd..cb641d6567a 100644 --- a/security/xray-core/Makefile +++ b/security/xray-core/Makefile @@ -21,33 +21,34 @@ GH_PROJECT= Xray-core GH_TUPLE= \ cheekybits:genny:v1.0.0:cheekybits_genny/vendor/github.com/cheekybits/genny \ davecgh:go-spew:v1.1.1:davecgh_go_spew/vendor/github.com/davecgh/go-spew \ - dgryski:go-metro:2661b20a2446:dgryski_go_metro/vendor/github.com/dgryski/go-metro \ + dgryski:go-metro:adc40b04c140:dgryski_go_metro/vendor/github.com/dgryski/go-metro \ fsnotify:fsnotify:v1.5.1:fsnotify_fsnotify/vendor/github.com/fsnotify/fsnotify \ ghodss:yaml:25d852aebe32:ghodss_yaml/vendor/github.com/ghodss/yaml \ go-task:slim-sprig:348f09dbbbc0:go_task_slim_sprig/vendor/github.com/go-task/slim-sprig \ go-tomb:tomb:dd632973f1e7:go_tomb_tomb/vendor/gopkg.in/tomb.v1 \ 
go-yaml:yaml:496545a6307b:go_yaml_yaml_1/vendor/gopkg.in/yaml.v3 \ go-yaml:yaml:v2.4.0:go_yaml_yaml/vendor/gopkg.in/yaml.v2 \ - golang:crypto:4570a0811e8b:golang_crypto/vendor/golang.org/x/crypto \ + golang:crypto:198e4374d7ed:golang_crypto/vendor/golang.org/x/crypto \ golang:mock:v1.6.0:golang_mock/vendor/github.com/golang/mock \ golang:mod:v0.5.1:golang_mod/vendor/golang.org/x/mod \ - golang:net:491a49abca63:golang_net/vendor/golang.org/x/net \ + golang:net:cd36cc0744dd:golang_net/vendor/golang.org/x/net \ golang:protobuf:v1.5.2:golang_protobuf/vendor/github.com/golang/protobuf \ golang:sync:036812b2e83c:golang_sync/vendor/golang.org/x/sync \ - golang:sys:4825e8c3871d:golang_sys/vendor/golang.org/x/sys \ + golang:sys:99c3d69c2c27:golang_sys/vendor/golang.org/x/sys \ golang:text:v0.3.7:golang_text/vendor/golang.org/x/text \ - golang:tools:v0.1.8:golang_tools/vendor/golang.org/x/tools \ + golang:tools:v0.1.9:golang_tools/vendor/golang.org/x/tools \ golang:xerrors:5ec99f83aff1:golang_xerrors/vendor/golang.org/x/xerrors \ - google:go-cmp:v0.5.6:google_go_cmp/vendor/github.com/google/go-cmp \ - google:go-genproto:3a66f561d7aa:google_go_genproto/vendor/google.golang.org/genproto \ + google:go-cmp:v0.5.7:google_go_cmp/vendor/github.com/google/go-cmp \ + google:go-genproto:9970aeb2e350:google_go_genproto/vendor/google.golang.org/genproto \ google:starlark-go:70c0e40ae128:google_starlark_go/vendor/go.starlark.net \ gorilla:websocket:v1.4.2:gorilla_websocket/vendor/github.com/gorilla/websocket \ - grpc:grpc-go:v1.43.0:grpc_grpc_go/vendor/google.golang.org/grpc \ + grpc:grpc-go:v1.44.0:grpc_grpc_go/vendor/google.golang.org/grpc \ h12w:socks:v1.0.3:h12w_socks/vendor/h12.io/socks \ - lucas-clemente:quic-go:v0.24.0:lucas_clemente_quic_go/vendor/github.com/lucas-clemente/quic-go \ + lucas-clemente:quic-go:v0.25.0:lucas_clemente_quic_go/vendor/github.com/lucas-clemente/quic-go \ 
marten-seemann:qtls-go1-16:v0.1.4:marten_seemann_qtls_go1_16/vendor/github.com/marten-seemann/qtls-go1-16 \ marten-seemann:qtls-go1-17:v0.1.0:marten_seemann_qtls_go1_17/vendor/github.com/marten-seemann/qtls-go1-17 \ - miekg:dns:v1.1.43:miekg_dns/vendor/github.com/miekg/dns \ + marten-seemann:qtls-go1-18:v0.1.0-beta.1:marten_seemann_qtls_go1_18/vendor/github.com/marten-seemann/qtls-go1-18 \ + miekg:dns:v1.1.45:miekg_dns/vendor/github.com/miekg/dns \ nxadm:tail:v1.4.8:nxadm_tail/vendor/github.com/nxadm/tail \ onsi:ginkgo:v1.16.5:onsi_ginkgo/vendor/github.com/onsi/ginkgo \ pelletier:go-toml:v1.9.4:pelletier_go_toml/vendor/github.com/pelletier/go-toml \ @@ -65,4 +66,3 @@ GH_TUPLE= \ PLIST_FILES= bin/xray .include - diff --git a/security/xray-core/distinfo b/security/xray-core/distinfo index 90be63fe18b..303ff64fb7c 100644 --- a/security/xray-core/distinfo +++ b/security/xray-core/distinfo @@ -1,12 +1,12 @@ -TIMESTAMP = 1644340828 +TIMESTAMP = 1648442003 SHA256 (xtls-Xray-core-v1.5.3_GH0.tar.gz) = 4b8d78cc20bdf2e8936c02b05d22f0a3231075155ffdc67508d8448ab8858252 SIZE (xtls-Xray-core-v1.5.3_GH0.tar.gz) = 539954 SHA256 (cheekybits-genny-v1.0.0_GH0.tar.gz) = 6982bf513333fb3ee3e6e0633500a3800fb6a3d6beb9e6c6084a96c85a49dd73 SIZE (cheekybits-genny-v1.0.0_GH0.tar.gz) = 15585 SHA256 (davecgh-go-spew-v1.1.1_GH0.tar.gz) = 7d82b9bb7291adbe7498fe946920ab3e7fc9e6cbfc3b2294693fad00bf0dd17e SIZE (davecgh-go-spew-v1.1.1_GH0.tar.gz) = 42152 -SHA256 (dgryski-go-metro-2661b20a2446_GH0.tar.gz) = 5535b1d47fec0b4c30b97d1ef51e987feb9d11987bfd23ea0afc739ce9d7e19a -SIZE (dgryski-go-metro-2661b20a2446_GH0.tar.gz) = 4510 +SHA256 (dgryski-go-metro-adc40b04c140_GH0.tar.gz) = 799a1b742820854095fcfda6b05b7823a27af39a2c7158a5fe9e10be076f7afa +SIZE (dgryski-go-metro-adc40b04c140_GH0.tar.gz) = 4505 SHA256 (fsnotify-fsnotify-v1.5.1_GH0.tar.gz) = bf4be597aef95796bec7c7def6701581c5e153376ba5a5aa31c2d04470d84f1d SIZE (fsnotify-fsnotify-v1.5.1_GH0.tar.gz) = 32691 SHA256 (ghodss-yaml-25d852aebe32_GH0.tar.gz) = 
52ad2f9939d39cd22a4267ad385eeef1ae6ae329929bb4117181f87af1689c27 @@ -19,46 +19,48 @@ SHA256 (go-yaml-yaml-496545a6307b_GH0.tar.gz) = ed0e11dc14bbbd4127031d7e8b9e58da SIZE (go-yaml-yaml-496545a6307b_GH0.tar.gz) = 90156 SHA256 (go-yaml-yaml-v2.4.0_GH0.tar.gz) = d8e94679e5fff6bd1a35e10241543929a5f3da44f701755babf99b3daf0faac0 SIZE (go-yaml-yaml-v2.4.0_GH0.tar.gz) = 73209 -SHA256 (golang-crypto-4570a0811e8b_GH0.tar.gz) = 3928242f5c64763c8e259a7032e7334031f537893a6ed0bdb52c84238e364c91 -SIZE (golang-crypto-4570a0811e8b_GH0.tar.gz) = 1734767 +SHA256 (golang-crypto-198e4374d7ed_GH0.tar.gz) = 6f5a93e22d91ef36efcbd3312fef2f238ec68fb006a7abb945cc637be67818ef +SIZE (golang-crypto-198e4374d7ed_GH0.tar.gz) = 1734026 SHA256 (golang-mock-v1.6.0_GH0.tar.gz) = 470174971c3a63361149a30f5b2d3a716a198afeb6cc71daa30712faa7293942 SIZE (golang-mock-v1.6.0_GH0.tar.gz) = 69251 SHA256 (golang-mod-v0.5.1_GH0.tar.gz) = 0ceb1aa06f263416ac67b6844a1704cc8078749c7e8ea9c3900a5c06d00e6036 SIZE (golang-mod-v0.5.1_GH0.tar.gz) = 112665 -SHA256 (golang-net-491a49abca63_GH0.tar.gz) = aa6aa87bb950aa94e0b44ebd29b707b272863baefb7ad7613a9753f0487bec42 -SIZE (golang-net-491a49abca63_GH0.tar.gz) = 1226608 +SHA256 (golang-net-cd36cc0744dd_GH0.tar.gz) = ec4567caa40e6a703e0881443623d13a812136dc2fc13935e129835fb5336076 +SIZE (golang-net-cd36cc0744dd_GH0.tar.gz) = 1228759 SHA256 (golang-protobuf-v1.5.2_GH0.tar.gz) = 088cc0f3ba18fb8f9d00319568ff0af5a06d8925a6e6cb983bb837b4efb703b3 SIZE (golang-protobuf-v1.5.2_GH0.tar.gz) = 171702 SHA256 (golang-sync-036812b2e83c_GH0.tar.gz) = 75ac8fc16bdceb2496c4a9cc98584b70c29032d91a9e57a624acb073e3232fda SIZE (golang-sync-036812b2e83c_GH0.tar.gz) = 18752 -SHA256 (golang-sys-4825e8c3871d_GH0.tar.gz) = f54fe514321c1d71f13deff3a185cd193bde1f794c41dee9a276b61a800a2bf8 -SIZE (golang-sys-4825e8c3871d_GH0.tar.gz) = 1255011 +SHA256 (golang-sys-99c3d69c2c27_GH0.tar.gz) = 6e2029cdfa0c8cd0bfdc49d17694970de73ddaaadb9fecb66f724b32c153aea0 +SIZE (golang-sys-99c3d69c2c27_GH0.tar.gz) = 1257135 
SHA256 (golang-text-v0.3.7_GH0.tar.gz) = 7cab2f6c3133ac1d422edd952b0dd2082fa55a73c2663fb2defd9bf83d649b26 SIZE (golang-text-v0.3.7_GH0.tar.gz) = 8354718 -SHA256 (golang-tools-v0.1.8_GH0.tar.gz) = f4449b1bc8aea5864bffadf507f4b690edae12b707c07c2f7497d6d5ff8497ec -SIZE (golang-tools-v0.1.8_GH0.tar.gz) = 2929454 +SHA256 (golang-tools-v0.1.9_GH0.tar.gz) = 21ea4ee3971ae00c0d1e9a9e8b658ae638dd7292a974dad7da15f1fa8967c8bc +SIZE (golang-tools-v0.1.9_GH0.tar.gz) = 2939407 SHA256 (golang-xerrors-5ec99f83aff1_GH0.tar.gz) = 71975d658357e170fd6a41f92539cde8b39c9cd8bfe5931b6311bc5f5c0da0d7 SIZE (golang-xerrors-5ec99f83aff1_GH0.tar.gz) = 13664 -SHA256 (google-go-cmp-v0.5.6_GH0.tar.gz) = cba2118596f694c135b3f3d6fe49b73ca2a7882aa4d7f346e341cb106afce5f1 -SIZE (google-go-cmp-v0.5.6_GH0.tar.gz) = 104421 -SHA256 (google-go-genproto-3a66f561d7aa_GH0.tar.gz) = 32918285394f05e8763c0bb7914fa3db1b0bbb20429e6eb836976b668f60954f -SIZE (google-go-genproto-3a66f561d7aa_GH0.tar.gz) = 12193520 +SHA256 (google-go-cmp-v0.5.7_GH0.tar.gz) = 70c779a3f2625d7ab5a427b4244b93c2e6a8d1cd779c182fc4fad7ff864a4fb2 +SIZE (google-go-cmp-v0.5.7_GH0.tar.gz) = 104499 +SHA256 (google-go-genproto-9970aeb2e350_GH0.tar.gz) = 199ef40d9230cc2e3f5ddce2d851d49a2efd5ad11e43eda545412e6892ddd6fd +SIZE (google-go-genproto-9970aeb2e350_GH0.tar.gz) = 12509797 SHA256 (google-starlark-go-70c0e40ae128_GH0.tar.gz) = 0c5f75018a875fbed15705761135245103215c4bf2a10242fc1ca4891a631dbc SIZE (google-starlark-go-70c0e40ae128_GH0.tar.gz) = 268831 SHA256 (gorilla-websocket-v1.4.2_GH0.tar.gz) = 91937a36bc9e0da3c895c73d4cb74b2cdb1aff54ab21b0d0724000e7b5b85b84 SIZE (gorilla-websocket-v1.4.2_GH0.tar.gz) = 54101 -SHA256 (grpc-grpc-go-v1.43.0_GH0.tar.gz) = 333ae8d72b0cb94eb815917a4994314459ef487dd04ddae82a5cc985816a0963 -SIZE (grpc-grpc-go-v1.43.0_GH0.tar.gz) = 1386087 +SHA256 (grpc-grpc-go-v1.44.0_GH0.tar.gz) = d733af3137f3d835dea70904f73d18b4cb34dc2cbc4d2ee8f6b9912dd38d22eb +SIZE (grpc-grpc-go-v1.44.0_GH0.tar.gz) = 1399762 SHA256 
(h12w-socks-v1.0.3_GH0.tar.gz) = 50b107b8f3e06d6f8bcc00f9e8a63e490e426a62617f36700bc336de055f613e SIZE (h12w-socks-v1.0.3_GH0.tar.gz) = 14641 -SHA256 (lucas-clemente-quic-go-v0.24.0_GH0.tar.gz) = 550483dfa5e6b100fd1c2809ff208106b2d1ffe0786f6353269a895f17d87034 -SIZE (lucas-clemente-quic-go-v0.24.0_GH0.tar.gz) = 520818 +SHA256 (lucas-clemente-quic-go-v0.25.0_GH0.tar.gz) = bba6b949f664c5d5ea0809994ef292a0868be6add030224e9bf687d0bd93a0fa +SIZE (lucas-clemente-quic-go-v0.25.0_GH0.tar.gz) = 521826 SHA256 (marten-seemann-qtls-go1-16-v0.1.4_GH0.tar.gz) = e6166cfc140acb6cfc11526444640e31ed47cf8b9c31f5812904a7735ecd8aa6 SIZE (marten-seemann-qtls-go1-16-v0.1.4_GH0.tar.gz) = 415515 SHA256 (marten-seemann-qtls-go1-17-v0.1.0_GH0.tar.gz) = e1c22bac3a614f31b0ca5d32c2a5aeee9b5032f3b23b8951810c21e990ed3997 SIZE (marten-seemann-qtls-go1-17-v0.1.0_GH0.tar.gz) = 421611 -SHA256 (miekg-dns-v1.1.43_GH0.tar.gz) = 889d61c9ce9594ef0fa4b8b601fcf094082b8c12f5638063652d8dee7ee5339a -SIZE (miekg-dns-v1.1.43_GH0.tar.gz) = 200617 +SHA256 (marten-seemann-qtls-go1-18-v0.1.0-beta.1_GH0.tar.gz) = b79985a11fc247ae9226cc461a2bfe5e5e99ffbfa15ba470a223dd2c838dde3a +SIZE (marten-seemann-qtls-go1-18-v0.1.0-beta.1_GH0.tar.gz) = 422198 +SHA256 (miekg-dns-v1.1.45_GH0.tar.gz) = cdca43779afe34dc56de4dfcbb73a8be50a2d243e2a0b8ea8d373eab98ebd882 +SIZE (miekg-dns-v1.1.45_GH0.tar.gz) = 203445 SHA256 (nxadm-tail-v1.4.8_GH0.tar.gz) = 8208362046819275a0809000dceacbd7b2a7caa07bcd6547dd2ff9b2104fa56c SIZE (nxadm-tail-v1.4.8_GH0.tar.gz) = 1255770 SHA256 (onsi-ginkgo-v1.16.5_GH0.tar.gz) = 0380c81321b764b75e76a7aa8fc8ab1ab361232a88d5b6124ef8b9a9e75d5287 diff --git a/sysutils/accountsservice/Makefile b/sysutils/accountsservice/Makefile index 8e00f5c7079..c0dac8c1723 100644 --- a/sysutils/accountsservice/Makefile +++ b/sysutils/accountsservice/Makefile @@ -2,7 +2,7 @@ PORTNAME= accountsservice PORTVERSION= 0.6.55 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= sysutils devel MASTER_SITES= 
http://www.freedesktop.org/software/${PORTNAME}/ diff --git a/sysutils/brasero/Makefile b/sysutils/brasero/Makefile index 0b90d4736d9..b2b2e9cdaf0 100644 --- a/sysutils/brasero/Makefile +++ b/sysutils/brasero/Makefile @@ -2,6 +2,7 @@ PORTNAME= brasero PORTVERSION= 3.12.3 +PORTREVISION= 1 CATEGORIES= sysutils audio multimedia gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/sysutils/bulk_extractor/Makefile b/sysutils/bulk_extractor/Makefile index 0514cf409b7..d2c1d1df906 100644 --- a/sysutils/bulk_extractor/Makefile +++ b/sysutils/bulk_extractor/Makefile @@ -2,6 +2,7 @@ PORTNAME= bulk_extractor PORTVERSION= 2.0.0 +PORTREVISION= 1 CATEGORIES= sysutils MAINTAINER= nobutaka@FreeBSD.org diff --git a/sysutils/cbsd/Makefile b/sysutils/cbsd/Makefile index e0a19dcf569..6712101a53c 100644 --- a/sysutils/cbsd/Makefile +++ b/sysutils/cbsd/Makefile @@ -1,5 +1,5 @@ PORTNAME= cbsd -DISTVERSION= 13.0.25 +DISTVERSION= 13.0.26 CATEGORIES= sysutils MAINTAINER= olevole@olevole.ru diff --git a/sysutils/cbsd/distinfo b/sysutils/cbsd/distinfo index 4a44d73b2c7..b139dadfe1b 100644 --- a/sysutils/cbsd/distinfo +++ b/sysutils/cbsd/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647886193 -SHA256 (cbsd-cbsd-13.0.25_GH0.tar.gz) = dd3d7c807d7d844d6cf25917f146710ba72d649fbf1be4d987cf166fd2b415f8 -SIZE (cbsd-cbsd-13.0.25_GH0.tar.gz) = 5581685 +TIMESTAMP = 1648397401 +SHA256 (cbsd-cbsd-13.0.26_GH0.tar.gz) = 52628b83099760ea53737ffc12eeded87635952048dfe95ea3ac1b2a7253ea1f +SIZE (cbsd-cbsd-13.0.26_GH0.tar.gz) = 5581790 diff --git a/sysutils/cbsd/pkg-plist b/sysutils/cbsd/pkg-plist index 2bc3f0347ec..603e5530888 100644 --- a/sysutils/cbsd/pkg-plist +++ b/sysutils/cbsd/pkg-plist @@ -219,7 +219,7 @@ cbsd/etc/defaults/vm-linux-Kali-2022-amd64.conf cbsd/etc/defaults/vm-linux-Mint-20.conf cbsd/etc/defaults/vm-linux-OracleLinux-7.conf cbsd/etc/defaults/vm-linux-OracleLinux-8.conf -cbsd/etc/defaults/vm-linux-Parrot-security-4-x64.conf +cbsd/etc/defaults/vm-linux-Parrot-security-5-x64.conf 
cbsd/etc/defaults/vm-linux-Rocky-8-x86_64.conf cbsd/etc/defaults/vm-linux-TinyCore-x86-13.conf cbsd/etc/defaults/vm-linux-TrueNAS-Scale-22.conf diff --git a/sysutils/cfengine-devel/Makefile b/sysutils/cfengine-devel/Makefile index 10a11db17b5..b38342de802 100644 --- a/sysutils/cfengine-devel/Makefile +++ b/sysutils/cfengine-devel/Makefile @@ -2,6 +2,7 @@ PORTNAME= cfengine PORTVERSION= 3.${CFENGINE_COMMIT_DATE} +PORTREVISION= 1 CATEGORIES= sysutils PKGNAMESUFFIX= -devel CFENGINE_HASH= 9bc6e3d83 diff --git a/sysutils/cfengine316/Makefile b/sysutils/cfengine316/Makefile index d328ef92397..7767ba98acf 100644 --- a/sysutils/cfengine316/Makefile +++ b/sysutils/cfengine316/Makefile @@ -2,6 +2,7 @@ PORTNAME= cfengine PORTVERSION= 3.16.0 +PORTREVISION= 1 CATEGORIES= sysutils MASTER_SITES= https://s3.amazonaws.com/cfengine-package-repos/tarballs/ diff --git a/sysutils/cfengine317/Makefile b/sysutils/cfengine317/Makefile index 5c1b0307ecb..46e11389eaf 100644 --- a/sysutils/cfengine317/Makefile +++ b/sysutils/cfengine317/Makefile @@ -2,6 +2,7 @@ PORTNAME= cfengine PORTVERSION= 3.17.0 +PORTREVISION= 1 CATEGORIES= sysutils MASTER_SITES= https://s3.amazonaws.com/cfengine-package-repos/tarballs/ diff --git a/sysutils/cfengine318/Makefile b/sysutils/cfengine318/Makefile index d864ae4bade..77bf7e85277 100644 --- a/sysutils/cfengine318/Makefile +++ b/sysutils/cfengine318/Makefile @@ -2,6 +2,7 @@ PORTNAME= cfengine PORTVERSION= 3.18.1 +PORTREVISION= 1 CATEGORIES= sysutils MASTER_SITES= https://s3.amazonaws.com/cfengine-package-repos/tarballs/ diff --git a/sysutils/cfengine319/Makefile b/sysutils/cfengine319/Makefile index be199418df3..dbb6e45d38d 100644 --- a/sysutils/cfengine319/Makefile +++ b/sysutils/cfengine319/Makefile @@ -2,6 +2,7 @@ PORTNAME= cfengine PORTVERSION= 3.19.0 +PORTREVISION= 1 CATEGORIES= sysutils MASTER_SITES= https://s3.amazonaws.com/cfengine-package-repos/tarballs/ diff --git a/sysutils/cinnamon-control-center/Makefile b/sysutils/cinnamon-control-center/Makefile 
index 1ad2967d019..d9528dd2cd1 100644 --- a/sysutils/cinnamon-control-center/Makefile +++ b/sysutils/cinnamon-control-center/Makefile @@ -2,6 +2,7 @@ PORTNAME= cinnamon-control-center PORTVERSION= 4.8.2 +PORTREVISION= 1 CATEGORIES= sysutils gnome DIST_SUBDIR= gnome diff --git a/sysutils/cinnamon-settings-daemon/Makefile b/sysutils/cinnamon-settings-daemon/Makefile index 6f502c5e0a1..d615afcb16b 100644 --- a/sysutils/cinnamon-settings-daemon/Makefile +++ b/sysutils/cinnamon-settings-daemon/Makefile @@ -2,6 +2,7 @@ PORTNAME= cinnamon-settings-daemon PORTVERSION= 4.8.5 +PORTREVISION= 1 CATEGORIES= sysutils gnome DIST_SUBDIR= gnome diff --git a/sysutils/cluster-glue/Makefile b/sysutils/cluster-glue/Makefile index bc1a1c0acab..acce99a1d4d 100644 --- a/sysutils/cluster-glue/Makefile +++ b/sysutils/cluster-glue/Makefile @@ -1,6 +1,6 @@ PORTNAME= cluster-glue PORTVERSION= 1.0.12 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= sysutils MASTER_SITES= http://hg.linux-ha.org/glue/archive/ DISTNAME= 0a7add1d9996 diff --git a/sysutils/consolekit2/Makefile b/sysutils/consolekit2/Makefile index f8df785b21f..33bc82aaf8f 100644 --- a/sysutils/consolekit2/Makefile +++ b/sysutils/consolekit2/Makefile @@ -2,7 +2,7 @@ PORTNAME= consolekit PORTVERSION= 1.2.4 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= sysutils gnome PKGNAMESUFFIX= 2 diff --git a/sysutils/fluent-bit/Makefile b/sysutils/fluent-bit/Makefile index 946ba13ed02..ccf6d35815e 100644 --- a/sysutils/fluent-bit/Makefile +++ b/sysutils/fluent-bit/Makefile @@ -2,7 +2,7 @@ PORTNAME= fluent-bit DISTVERSIONPREFIX= v -DISTVERSION= 1.9.0 +DISTVERSION= 1.9.1 CATEGORIES= sysutils MAINTAINER= girgen@FreeBSD.org diff --git a/sysutils/fluent-bit/distinfo b/sysutils/fluent-bit/distinfo index 5cd161347a2..e7fbd279685 100644 --- a/sysutils/fluent-bit/distinfo +++ b/sysutils/fluent-bit/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647854587 -SHA256 (fluent-fluent-bit-v1.9.0_GH0.tar.gz) = ea6f8d8ea68c63ee1eee1059f6d48947209b9f38a40f29fcfdc2188ec6cdc999 
-SIZE (fluent-fluent-bit-v1.9.0_GH0.tar.gz) = 15312145 +TIMESTAMP = 1648409460 +SHA256 (fluent-fluent-bit-v1.9.1_GH0.tar.gz) = b0c06f8cc7e5571d9768efe56e59d9aa7efec04c797fd18a3268406973a5b72d +SIZE (fluent-fluent-bit-v1.9.1_GH0.tar.gz) = 15318400 diff --git a/sysutils/fusefs-s3fs/Makefile b/sysutils/fusefs-s3fs/Makefile index 0e838a87559..3483fff41ad 100644 --- a/sysutils/fusefs-s3fs/Makefile +++ b/sysutils/fusefs-s3fs/Makefile @@ -3,6 +3,7 @@ PORTNAME= s3fs DISTVERSIONPREFIX= v DISTVERSION= 1.90 +PORTREVISION= 1 CATEGORIES= sysutils PKGNAMEPREFIX= fusefs- diff --git a/sysutils/gapcmon/Makefile b/sysutils/gapcmon/Makefile index 6fc91bfc2e6..59f05602a0a 100644 --- a/sysutils/gapcmon/Makefile +++ b/sysutils/gapcmon/Makefile @@ -1,6 +1,6 @@ PORTNAME= gapcmon PORTVERSION= 0.8.9 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= sysutils MASTER_SITES= SF diff --git a/sysutils/gconf-editor/Makefile b/sysutils/gconf-editor/Makefile index 134f2f0e0a7..daace182acc 100644 --- a/sysutils/gconf-editor/Makefile +++ b/sysutils/gconf-editor/Makefile @@ -3,7 +3,7 @@ PORTNAME= gconf-editor PORTVERSION= 3.0.1 -PORTREVISION= 3 +PORTREVISION= 4 PORTEPOCH= 1 CATEGORIES= sysutils gnome MASTER_SITES= GNOME diff --git a/sysutils/gksu/Makefile b/sysutils/gksu/Makefile index c3da679f52c..ca9be03013d 100644 --- a/sysutils/gksu/Makefile +++ b/sysutils/gksu/Makefile @@ -2,7 +2,7 @@ PORTNAME= gksu PORTVERSION= 2.0.2 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= sysutils MASTER_SITES= http://people.debian.org/~kov/gksu/ diff --git a/sysutils/gnome-control-center/Makefile b/sysutils/gnome-control-center/Makefile index ee15ef8af7a..8e78b763bcc 100644 --- a/sysutils/gnome-control-center/Makefile +++ b/sysutils/gnome-control-center/Makefile @@ -2,12 +2,13 @@ PORTNAME= gnome-control-center PORTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= sysutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome MAINTAINER= gnome@FreeBSD.org -COMMENT= Control center for 
GNOME 3 project +COMMENT= Control center for the GNOME desktop LICENSE= GPLv2+ LICENSE_FILE= ${WRKSRC}/COPYING diff --git a/sysutils/gnome-control-center/files/patch-panels_user-accounts_cc-realm-manager.c b/sysutils/gnome-control-center/files/patch-panels_user-accounts_cc-realm-manager.c new file mode 100644 index 00000000000..10c223ceb15 --- /dev/null +++ b/sysutils/gnome-control-center/files/patch-panels_user-accounts_cc-realm-manager.c @@ -0,0 +1,25 @@ +Fix for Heimdal (versus MIT). + +Index: panels/user-accounts/cc-realm-manager.c +--- panels/user-accounts/cc-realm-manager.c.orig ++++ panels/user-accounts/cc-realm-manager.c +@@ -22,7 +22,7 @@ + + #include "cc-realm-manager.h" + +-#include ++#include + + #include + #include +@@ -637,8 +637,10 @@ login_perform_kinit (krb5_context k5, + code = krb5_get_init_creds_opt_alloc (k5, &opts); + g_return_val_if_fail (code == 0, code); + ++#ifdef HAVE_KRB5_GET_INIT_CREDS_OPT_SET_OUT_CCACHE + code = krb5_get_init_creds_opt_set_out_ccache (k5, opts, ccache); + g_return_val_if_fail (code == 0, code); ++#endif + + code = krb5_get_init_creds_password (k5, &creds, principal, + (char *)password, diff --git a/sysutils/gnome-control-center/pkg-descr b/sysutils/gnome-control-center/pkg-descr index 98b39195a2b..0f7f031b28b 100644 --- a/sysutils/gnome-control-center/pkg-descr +++ b/sysutils/gnome-control-center/pkg-descr @@ -1,3 +1,3 @@ -Configuration tools tightly integrated within the GNOME 3 desktop. +Configuration tools tightly integrated within the GNOME desktop. 
WWW: https://www.gnome.org/ diff --git a/sysutils/gnome-system-monitor/Makefile b/sysutils/gnome-system-monitor/Makefile index ca6a845a2c8..3d73bc65dcf 100644 --- a/sysutils/gnome-system-monitor/Makefile +++ b/sysutils/gnome-system-monitor/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-system-monitor PORTVERSION= 41.0 +PORTREVISION= 1 CATEGORIES= sysutils gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/sysutils/gomi/Makefile b/sysutils/gomi/Makefile index 82c7fd095f5..96d948688b7 100644 --- a/sysutils/gomi/Makefile +++ b/sysutils/gomi/Makefile @@ -5,7 +5,7 @@ PORTVERSION= 1.1.1 DISTVERSIONPREFIX= v CATEGORIES= sysutils -MAINTAINER= danfe@FreeBSD.org +MAINTAINER= ports@FreeBSD.org COMMENT= Unix rm(1) command that can restore deleted files LICENSE= MIT @@ -13,7 +13,6 @@ LICENSE= MIT USES= go:modules USE_GITHUB= yes GH_ACCOUNT= b4b4r07 - GH_TUPLE= \ b4b4r07:go-cli-log:8fac4d71de01:b4b4r07_go_cli_log/vendor/github.com/b4b4r07/go-cli-log \ chzyer:readline:2972be24d48e:chzyer_readline/vendor/github.com/chzyer/readline \ diff --git a/sysutils/graveman/Makefile b/sysutils/graveman/Makefile index 2f14514ba13..c9f301739fe 100644 --- a/sysutils/graveman/Makefile +++ b/sysutils/graveman/Makefile @@ -2,7 +2,7 @@ PORTNAME= graveman DISTVERSION= 0.3.12-5 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= sysutils MASTER_SITES= http://graveman.tuxfamily.org/sources/ diff --git a/sysutils/gsmartcontrol/Makefile b/sysutils/gsmartcontrol/Makefile index 6301fa7ebb7..aeece114017 100644 --- a/sysutils/gsmartcontrol/Makefile +++ b/sysutils/gsmartcontrol/Makefile @@ -2,7 +2,7 @@ PORTNAME= gsmartcontrol PORTVERSION= 1.1.3 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= sysutils MASTER_SITES= SF/${PORTNAME}/${PORTVERSION} diff --git a/sysutils/gstreamer1-plugins-cdio/Makefile b/sysutils/gstreamer1-plugins-cdio/Makefile index 44e496d2fb3..e3181a1aaad 100644 --- a/sysutils/gstreamer1-plugins-cdio/Makefile +++ 
b/sysutils/gstreamer1-plugins-cdio/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= sysutils COMMENT= GStreamer compact disc input and control library diff --git a/sysutils/istatserver/Makefile b/sysutils/istatserver/Makefile index fd4ce7643d1..0af9ef085b9 100644 --- a/sysutils/istatserver/Makefile +++ b/sysutils/istatserver/Makefile @@ -2,6 +2,7 @@ PORTNAME= istatserver PORTVERSION= 3.02 +PORTREVISION= 1 CATEGORIES= sysutils MASTER_SITES= https://s3.amazonaws.com/bjango/files/istatserverlinux/ diff --git a/sysutils/jstest-gtk/Makefile b/sysutils/jstest-gtk/Makefile index baf77de94e3..120ee1b7acf 100644 --- a/sysutils/jstest-gtk/Makefile +++ b/sysutils/jstest-gtk/Makefile @@ -1,6 +1,6 @@ PORTNAME= jstest-gtk PORTVERSION= 20180710 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= sysutils MAINTAINER= rozhuk.im@gmail.com diff --git a/sysutils/khelpcenter/Makefile b/sysutils/khelpcenter/Makefile index d2e74caf6d3..b34c2c31e45 100644 --- a/sysutils/khelpcenter/Makefile +++ b/sysutils/khelpcenter/Makefile @@ -1,5 +1,6 @@ PORTNAME= khelpcenter DISTVERSION= ${KDE_APPLICATIONS_VERSION} +PORTREVISION= 1 CATEGORIES= sysutils kde kde-applications # kde kde-applications-plasma MAINTAINER= kde@FreeBSD.org diff --git a/sysutils/libgksu/Makefile b/sysutils/libgksu/Makefile index a16c181ad3d..b9f7e27b9ca 100644 --- a/sysutils/libgksu/Makefile +++ b/sysutils/libgksu/Makefile @@ -2,7 +2,7 @@ PORTNAME= libgksu PORTVERSION= 2.0.12 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= sysutils MASTER_SITES= http://people.debian.org/~kov/gksu/ diff --git a/sysutils/ltfs/Makefile b/sysutils/ltfs/Makefile index c02d344ba7a..dcdcd0bf8cd 100644 --- a/sysutils/ltfs/Makefile +++ b/sysutils/ltfs/Makefile @@ -4,7 +4,7 @@ PORTNAME= ltfs DISTVERSIONPREFIX= v DISTVERSION= 2.4.4.0 DISTVERSIONSUFFIX= -10470 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= sysutils MAINTAINER= kbowling@FreeBSD.org diff --git a/sysutils/lttng-tools/Makefile b/sysutils/lttng-tools/Makefile index 
746d387046c..628d9ca8399 100644 --- a/sysutils/lttng-tools/Makefile +++ b/sysutils/lttng-tools/Makefile @@ -2,7 +2,7 @@ PORTNAME= lttng-tools PORTVERSION= 2.9.3 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= sysutils MASTER_SITES= http://lttng.org/files/${PORTNAME}/ diff --git a/sysutils/mate-control-center/Makefile b/sysutils/mate-control-center/Makefile index f411478c35a..95812015bf6 100644 --- a/sysutils/mate-control-center/Makefile +++ b/sysutils/mate-control-center/Makefile @@ -2,6 +2,7 @@ PORTNAME= mate-control-center PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= sysutils mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/sysutils/mate-system-monitor/Makefile b/sysutils/mate-system-monitor/Makefile index a68aa2078ad..863f4935eba 100644 --- a/sysutils/mate-system-monitor/Makefile +++ b/sysutils/mate-system-monitor/Makefile @@ -2,6 +2,7 @@ PORTNAME= mate-system-monitor PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= sysutils mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/sysutils/nitrogen/Makefile b/sysutils/nitrogen/Makefile index cbfa430f1ab..4fb27261485 100644 --- a/sysutils/nitrogen/Makefile +++ b/sysutils/nitrogen/Makefile @@ -2,7 +2,7 @@ PORTNAME= nitrogen DISTVERSION= 1.6.1 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= sysutils MASTER_SITES= https://github.com/l3ib/nitrogen/releases/download/${DISTVERSION}/ diff --git a/sysutils/osinfo-db-tools/Makefile b/sysutils/osinfo-db-tools/Makefile index ae8409dfb57..6c18373d293 100644 --- a/sysutils/osinfo-db-tools/Makefile +++ b/sysutils/osinfo-db-tools/Makefile @@ -2,6 +2,7 @@ PORTNAME= osinfo-db-tools PORTVERSION= 1.10.0 +PORTREVISION= 1 CATEGORIES= sysutils MASTER_SITES= https://releases.pagure.org/libosinfo/ diff --git a/sysutils/pam_mount/Makefile b/sysutils/pam_mount/Makefile index 35a29357c94..a705ab96137 100644 --- a/sysutils/pam_mount/Makefile +++ b/sysutils/pam_mount/Makefile @@ -2,7 +2,7 @@ PORTNAME= pam_mount PORTVERSION= 2.12 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= sysutils 
MASTER_SITES= SF/${PORTNAME:S/_/-/}/${PORTNAME}/${PORTVERSION}/ diff --git a/sysutils/pam_xdg/Makefile b/sysutils/pam_xdg/Makefile index 4022096cfb8..2a2df336658 100644 --- a/sysutils/pam_xdg/Makefile +++ b/sysutils/pam_xdg/Makefile @@ -10,7 +10,7 @@ LICENSE= ISCL USES= uidfix USE_GITHUB= yes -GH_ACCOUNT= jbeich # mirror +GH_ACCOUNT= sdaoden GH_PROJECT= s-toolbox GH_TAGNAME= cdbe3ce PLIST_FILES= lib/${PORTNAME}.so \ diff --git a/sysutils/pam_xdg/distinfo b/sysutils/pam_xdg/distinfo index 3a691bf4d50..91edb057362 100644 --- a/sysutils/pam_xdg/distinfo +++ b/sysutils/pam_xdg/distinfo @@ -1,3 +1,3 @@ TIMESTAMP = 1631823427 -SHA256 (jbeich-s-toolbox-s20210916-cdbe3ce_GH0.tar.gz) = a63a9630df2808ab6bf051ba29f154270eb4e7ab0c7e523ef59a0e31d71b6efb -SIZE (jbeich-s-toolbox-s20210916-cdbe3ce_GH0.tar.gz) = 100666 +SHA256 (sdaoden-s-toolbox-s20210916-cdbe3ce_GH0.tar.gz) = a63a9630df2808ab6bf051ba29f154270eb4e7ab0c7e523ef59a0e31d71b6efb +SIZE (sdaoden-s-toolbox-s20210916-cdbe3ce_GH0.tar.gz) = 100666 diff --git a/sysutils/py-ansible-lint/Makefile b/sysutils/py-ansible-lint/Makefile index f7e720eb062..d40d071c49e 100644 --- a/sysutils/py-ansible-lint/Makefile +++ b/sysutils/py-ansible-lint/Makefile @@ -1,5 +1,5 @@ PORTNAME= ansible-lint -PORTVERSION= 5.4.0 +PORTVERSION= 6.0.2 CATEGORIES= sysutils python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -7,8 +7,8 @@ PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} MAINTAINER= matthew@FreeBSD.org COMMENT= Checks playbooks for sub-optimal practices and behaviour -LICENSE= MIT -LICENSE_FILE= ${WRKSRC}/LICENSE +LICENSE= GPLv3 +LICENSE_FILE= ${WRKSRC}/COPYING RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}ansible-core>=2.10:sysutils/py-ansible-core@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}ruamel.yaml>=0.15.37:devel/py-ruamel.yaml@${PY_FLAVOR} \ @@ -18,7 +18,8 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}ansible-core>=2.10:sysutils/py-ansible-core@ ${PYTHON_PKGNAMEPREFIX}yaml>=0:devel/py-yaml@${PY_FLAVOR} \ 
${PYTHON_PKGNAMEPREFIX}wcmatch>=7.0:textproc/py-wcmatch@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}tenacity>=7.0.0:devel/py-tenacity@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}packaging>=20.9:devel/py-packaging@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}packaging>=20.9:devel/py-packaging@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}yamllint>=1.25.0:devel/py-yamllint@${PY_FLAVOR} BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}setuptools_scm>=3.5.0:devel/py-setuptools_scm@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}setuptools_scm_git_archive>=1.0:devel/py-setuptools_scm_git_archive@${PY_FLAVOR} TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}flaky>=3.7.0:devel/py-flaky@${PY_FLAVOR} \ @@ -28,7 +29,7 @@ TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}flaky>=3.7.0:devel/py-flaky@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}psutil>0:sysutils/py-psutil@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}setuptools>=42.0.0:devel/py-setuptools@${PY_FLAVOR} -USES= python:3.6+ +USES= python:3.8+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/sysutils/py-ansible-lint/distinfo b/sysutils/py-ansible-lint/distinfo index 15f52761413..1328bbc8a7e 100644 --- a/sysutils/py-ansible-lint/distinfo +++ b/sysutils/py-ansible-lint/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1644749911 -SHA256 (ansible-lint-5.4.0.tar.gz) = 2160a60b4ab034c04006d701a1779340ffb0f6e28f030ff8de958e1062a88962 -SIZE (ansible-lint-5.4.0.tar.gz) = 363312 +TIMESTAMP = 1648305546 +SHA256 (ansible-lint-6.0.2.tar.gz) = b539bc22d13e6de0cc2e25758e1d28b2bc01561a414ae37ceda3708b5a2a79ed +SIZE (ansible-lint-6.0.2.tar.gz) = 209451 diff --git a/sysutils/py-diffoscope/Makefile b/sysutils/py-diffoscope/Makefile index a5feed48d10..aef61be7ebb 100644 --- a/sysutils/py-diffoscope/Makefile +++ b/sysutils/py-diffoscope/Makefile @@ -1,7 +1,7 @@ # Created by: Kubilay Kocak PORTNAME= diffoscope -PORTVERSION= 207 +PORTVERSION= 208 CATEGORIES= sysutils python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/sysutils/py-diffoscope/distinfo 
b/sysutils/py-diffoscope/distinfo index 95e4f39ad3a..600ba2da1a9 100644 --- a/sysutils/py-diffoscope/distinfo +++ b/sysutils/py-diffoscope/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646778016 -SHA256 (diffoscope-207.tar.gz) = e670160911c8e465ab178f23bd1a2e6a827032b7bbec5f24eebf9215f1ae5f54 -SIZE (diffoscope-207.tar.gz) = 2707223 +TIMESTAMP = 1648306052 +SHA256 (diffoscope-208.tar.gz) = 2c5c0ac1159eefce158154849fe67f0f527dffc5295bfd3ca1aef14962ffcbcb +SIZE (diffoscope-208.tar.gz) = 2709374 diff --git a/sysutils/py-drmaa/files/patch-2to3 b/sysutils/py-drmaa/files/patch-2to3 new file mode 100644 index 00000000000..1e6268bbbd0 --- /dev/null +++ b/sysutils/py-drmaa/files/patch-2to3 @@ -0,0 +1,28 @@ +--- drmaa/nt.py.orig 2010-05-27 10:25:17 UTC ++++ drmaa/nt.py +@@ -31,7 +31,7 @@ def namedtuple(typename, field_names, verbose=False): + + # Parse and validate the field names. Validation serves two purposes, + # generating informative error messages and preventing template injection attacks. +- if isinstance(field_names, basestring): ++ if isinstance(field_names, str): + field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas + field_names = tuple(field_names) + for name in (typename,) + field_names: +@@ -81,13 +81,13 @@ def namedtuple(typename, field_names, verbose=False): + for i, name in enumerate(field_names): + template += ' %s = property(itemgetter(%d))\n' % (name, i) + if verbose: +- print template ++ print(template) + + # Execute the template string in a temporary namespace + namespace = dict(itemgetter=_itemgetter) + try: +- exec template in namespace +- except SyntaxError, e: ++ exec(template, namespace) ++ except SyntaxError as e: + raise SyntaxError(e.message + ':\n' + template) + result = namespace[typename] + diff --git a/sysutils/py-hared/Makefile b/sysutils/py-hared/Makefile index 83d727b22bb..643a59ca704 100644 --- a/sysutils/py-hared/Makefile +++ b/sysutils/py-hared/Makefile @@ -1,6 +1,6 @@ PORTNAME= hared PORTVERSION= 
1.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= sysutils python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/sysutils/py-hared/files/patch-2to3 b/sysutils/py-hared/files/patch-2to3 new file mode 100644 index 00000000000..6dd4496c906 --- /dev/null +++ b/sysutils/py-hared/files/patch-2to3 @@ -0,0 +1,30 @@ +--- hared/__init__.py.orig 2018-03-26 17:30:13 UTC ++++ hared/__init__.py +@@ -4,7 +4,7 @@ import json + try: + from configparser import ConfigParser + except ImportError: +- from ConfigParser import ConfigParser ++ from configparser import ConfigParser + + __author__ = 'Jan-Piet Mens ' + +@@ -30,8 +30,8 @@ class Hare(): + pass + + def printconfig(self): +- print "Listening for UDP on %s:%d" % (self.listenhost, self.listenport) +- print "MQTT broker configured to %s:%d on %s" % (self.mqtthost, self.mqttport, self.topic) ++ print("Listening for UDP on %s:%d" % (self.listenhost, self.listenport)) ++ print("MQTT broker configured to %s:%d on %s" % (self.mqtthost, self.mqttport, self.topic)) + + def run(config='/usr/local/etc/hared.ini'): + h = Hare(config) +@@ -53,6 +53,6 @@ def run(config='/usr/local/etc/hared.ini'): + continue + + if h.verbose: +- print js ++ print(js) + + mqtt.single(h.topic, js, hostname=h.mqtthost, port=h.mqttport) diff --git a/sysutils/py-hared/files/patch-setup.py b/sysutils/py-hared/files/patch-setup.py new file mode 100644 index 00000000000..227adb5f89c --- /dev/null +++ b/sysutils/py-hared/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2018-03-26 17:22:26 UTC ++++ setup.py +@@ -8,7 +8,7 @@ setup(name='hared', + version='1.0', + description='hare daemon', + long_description=readme(), +- lassifiers=[ ++ classifiers=[ + 'Development Status :: 3 - Alpha', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 2.7', diff --git a/sysutils/py-mitogen/files/patch-2to3 b/sysutils/py-mitogen/files/patch-2to3 new file mode 100644 index 00000000000..5a2522d2a28 --- /dev/null +++ 
b/sysutils/py-mitogen/files/patch-2to3 @@ -0,0 +1,169 @@ +--- ansible_mitogen/compat/simplejson/decoder.py.orig 2019-11-02 17:59:13 UTC ++++ ansible_mitogen/compat/simplejson/decoder.py +@@ -56,8 +56,8 @@ _CONSTANTS = { + + STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS) + BACKSLASH = { +- '"': u'"', '\\': u'\\', '/': u'/', +- 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t', ++ '"': '"', '\\': '\\', '/': '/', ++ 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', + } + + DEFAULT_ENCODING = "utf-8" +@@ -85,8 +85,8 @@ def py_scanstring(s, end, encoding=None, strict=True, + content, terminator = chunk.groups() + # Content is contains zero or more unescaped string characters + if content: +- if not isinstance(content, unicode): +- content = unicode(content, encoding) ++ if not isinstance(content, str): ++ content = str(content, encoding) + _append(content) + # Terminator is the end of string, a literal control character, + # or a backslash denoting that an escape sequence follows +@@ -132,11 +132,11 @@ def py_scanstring(s, end, encoding=None, strict=True, + uni2 = int(esc2, 16) + uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00)) + next_end += 6 +- char = unichr(uni) ++ char = chr(uni) + end = next_end + # Append the unescaped character + _append(char) +- return u''.join(chunks), end ++ return ''.join(chunks), end + + + # Use speedup if available +@@ -145,7 +145,8 @@ scanstring = c_scanstring or py_scanstring + WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS) + WHITESPACE_STR = ' \t\n\r' + +-def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR): ++def JSONObject(xxx_todo_changeme, encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR): ++ (s, end) = xxx_todo_changeme + pairs = {} + # Use a slice to prevent IndexError from being raised, the following + # check will raise a more specific ValueError if the string is empty +@@ -220,7 +221,8 @@ def JSONObject((s, 
end), encoding, strict, scan_once, + pairs = object_hook(pairs) + return pairs, end + +-def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): ++def JSONArray(xxx_todo_changeme1, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): ++ (s, end) = xxx_todo_changeme1 + values = [] + nextchar = s[end:end + 1] + if nextchar in _ws: +--- ansible_mitogen/compat/simplejson/encoder.py.orig 2019-11-02 17:59:13 UTC ++++ ansible_mitogen/compat/simplejson/encoder.py +@@ -184,7 +184,7 @@ class JSONEncoder(object): + + """ + # This is for extremely simple cases and benchmarks. +- if isinstance(o, basestring): ++ if isinstance(o, str): + if isinstance(o, str): + _encoding = self.encoding + if (_encoding is not None +@@ -261,18 +261,15 @@ class JSONEncoder(object): + + def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, + ## HACK: hand-optimized bytecode; turn globals into locals +- False=False, +- True=True, + ValueError=ValueError, +- basestring=basestring, ++ str=str, + dict=dict, + float=float, + id=id, + int=int, + isinstance=isinstance, + list=list, +- long=long, +- str=str, ++ long=int, + tuple=tuple, + ): + +@@ -300,7 +297,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + first = False + else: + buf = separator +- if isinstance(value, basestring): ++ if isinstance(value, str): + yield buf + _encoder(value) + elif value is None: + yield buf + 'null' +@@ -308,7 +305,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield buf + 'true' + elif value is False: + yield buf + 'false' +- elif isinstance(value, (int, long)): ++ elif isinstance(value, int): + yield buf + str(value) + elif isinstance(value, float): + yield buf + _floatstr(value) +@@ -349,12 +346,12 @@ def _make_iterencode(markers, _default, _encoder, _ind + item_separator = _item_separator + first = True + if _sort_keys: +- items = dct.items() ++ items = list(dct.items()) + 
items.sort(key=lambda kv: kv[0]) + else: +- items = dct.iteritems() ++ items = iter(dct.items()) + for key, value in items: +- if isinstance(key, basestring): ++ if isinstance(key, str): + pass + # JavaScript is weakly typed for these, so it makes sense to + # also allow them. Many encoders seem to do something like this. +@@ -366,7 +363,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + key = 'false' + elif key is None: + key = 'null' +- elif isinstance(key, (int, long)): ++ elif isinstance(key, int): + key = str(key) + elif _skipkeys: + continue +@@ -378,7 +375,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield item_separator + yield _encoder(key) + yield _key_separator +- if isinstance(value, basestring): ++ if isinstance(value, str): + yield _encoder(value) + elif value is None: + yield 'null' +@@ -386,7 +383,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield 'true' + elif value is False: + yield 'false' +- elif isinstance(value, (int, long)): ++ elif isinstance(value, int): + yield str(value) + elif isinstance(value, float): + yield _floatstr(value) +@@ -407,7 +404,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + del markers[markerid] + + def _iterencode(o, _current_indent_level): +- if isinstance(o, basestring): ++ if isinstance(o, str): + yield _encoder(o) + elif o is None: + yield 'null' +@@ -415,7 +412,7 @@ def _make_iterencode(markers, _default, _encoder, _ind + yield 'true' + elif o is False: + yield 'false' +- elif isinstance(o, (int, long)): ++ elif isinstance(o, int): + yield str(o) + elif isinstance(o, float): + yield _floatstr(o) diff --git a/sysutils/py-mqttwarn/Makefile b/sysutils/py-mqttwarn/Makefile index 4cede578bf8..6a4f1cf9f8d 100644 --- a/sysutils/py-mqttwarn/Makefile +++ b/sysutils/py-mqttwarn/Makefile @@ -1,5 +1,5 @@ PORTNAME= mqttwarn -DISTVERSION= 0.22.0 +DISTVERSION= 0.28.1 CATEGORIES= sysutils python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -70,6 +70,9 
@@ TWILIO_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}twilio>6.11.0:devel/py-twilio@${PY_FL TWITTER_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}python-twitter>=3.4.1:net/py-python-twitter@${PY_FLAVOR} WEBSOCKET_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}websocket-client>=0.47.0:www/py-websocket-client@${PY_FLAVOR} +post-patch: + @${RM} ${WRKSRC}/tests/bad_funcs.py + do-test: @cd ${WRKSRC} && ${PYTHON_CMD} -m pytest -v -rs -o addopts= diff --git a/sysutils/py-mqttwarn/distinfo b/sysutils/py-mqttwarn/distinfo index d4623ad0520..f271604816e 100644 --- a/sysutils/py-mqttwarn/distinfo +++ b/sysutils/py-mqttwarn/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1622892763 -SHA256 (mqttwarn-0.22.0.tar.gz) = d84497d602f1a11e28943af89aa394492c6049e6fb7ea4653122c2183e56d98b -SIZE (mqttwarn-0.22.0.tar.gz) = 117091 +TIMESTAMP = 1648227979 +SHA256 (mqttwarn-0.28.1.tar.gz) = 0b8ec4b6b924a4a3651b90283717aaee698d65ca217c7fb2bdd00498d708fdce +SIZE (mqttwarn-0.28.1.tar.gz) = 120342 diff --git a/sysutils/py-power/files/patch-2to3 b/sysutils/py-power/files/patch-2to3 new file mode 100644 index 00000000000..fc02a4134b6 --- /dev/null +++ b/sysutils/py-power/files/patch-2to3 @@ -0,0 +1,24 @@ +--- power/tests.py.orig 2012-12-07 08:20:49 UTC ++++ power/tests.py +@@ -27,10 +27,10 @@ class TestPowerManagementCommon(unittest.TestCase): + + class TestObserver(power.PowerManagementObserver): + def on_power_sources_change(self, power_management): +- print "on_power_sources_change" ++ print("on_power_sources_change") + + def on_time_remaining_change(self, power_management): +- print "on_time_remaining_change" ++ print("on_time_remaining_change") + + + if __name__ == "__main__": +@@ -38,7 +38,7 @@ if __name__ == "__main__": + p = power.PowerManagement() + p.add_observer(o) + try: +- print "Power management observer is registered" ++ print("Power management observer is registered") + import time + while True: + time.sleep(1) diff --git a/sysutils/quicksynergy/Makefile b/sysutils/quicksynergy/Makefile index 44b23813575..a7972fc56c6 
100644 --- a/sysutils/quicksynergy/Makefile +++ b/sysutils/quicksynergy/Makefile @@ -2,7 +2,7 @@ PORTNAME= quicksynergy PORTVERSION= 0.9.0 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= sysutils MASTER_SITES= SF/${PORTNAME}/Linux/${PORTVERSION} diff --git a/sysutils/restic/Makefile b/sysutils/restic/Makefile index e9bf9955efa..fb94d31e112 100644 --- a/sysutils/restic/Makefile +++ b/sysutils/restic/Makefile @@ -1,6 +1,6 @@ PORTNAME= restic DISTVERSIONPREFIX= v -DISTVERSION= 0.12.1 +DISTVERSION= 0.13.0 CATEGORIES= sysutils MAINTAINER= ports@FreeBSD.org @@ -24,7 +24,7 @@ GH_TUPLE= \ census-instrumentation:opencensus-go:v0.23.0:census_instrumentation_opencensus_go/vendor/go.opencensus.io \ cespare:xxhash:v2.1.1:cespare_xxhash_v2/vendor/github.com/cespare/xxhash/v2 \ cpuguy83:go-md2man:v2.0.0:cpuguy83_go_md2man_v2/vendor/github.com/cpuguy83/go-md2man/v2 \ - dchest:siphash:v1.2.2:dchest_siphash/vendor/github.com/dchest/siphash \ + dnaeon:go-vcr:v1.2.0:dnaeon_go_vcr/vendor/github.com/dnaeon/go-vcr \ dustin:go-humanize:v1.0.0:dustin_go_humanize/vendor/github.com/dustin/go-humanize \ elithrar:simple-scrypt:v1.3.0:elithrar_simple_scrypt/vendor/github.com/elithrar/simple-scrypt \ form3tech-oss:jwt-go:v3.2.2:form3tech_oss_jwt_go/vendor/github.com/form3tech-oss/jwt-go \ @@ -63,18 +63,18 @@ GH_TUPLE= \ klauspost:cpuid:v1.3.1:klauspost_cpuid/vendor/github.com/klauspost/cpuid \ klauspost:cpuid:v2.0.4:klauspost_cpuid_v2/vendor/github.com/klauspost/cpuid/v2 \ kr:fs:v0.1.0:kr_fs/vendor/github.com/kr/fs \ - kurin:blazer:v0.5.3:kurin_blazer/vendor/github.com/kurin/blazer \ + kurin:blazer:ba894c124ac6:kurin_blazer/vendor/github.com/kurin/blazer \ minio:md5-simd:v1.1.0:minio_md5_simd/vendor/github.com/minio/md5-simd \ - minio:minio-go:v7.0.12:minio_minio_go_v7/vendor/github.com/minio/minio-go/v7 \ + minio:minio-go:v7.0.14:minio_minio_go_v7/vendor/github.com/minio/minio-go/v7 \ minio:sha256-simd:v1.0.0:minio_sha256_simd/vendor/github.com/minio/sha256-simd \ 
mitchellh:go-homedir:v1.1.0:mitchellh_go_homedir/vendor/github.com/mitchellh/go-homedir \ modern-go:concurrent:bacd9c7ef1dd:modern_go_concurrent/vendor/github.com/modern-go/concurrent \ modern-go:reflect2:v1.0.1:modern_go_reflect2/vendor/github.com/modern-go/reflect2 \ - ncw:swift:v1.0.53:ncw_swift/vendor/github.com/ncw/swift \ + ncw:swift:v2.0.0:ncw_swift_v2/vendor/github.com/ncw/swift/v2 \ pkg:errors:v0.9.1:pkg_errors/vendor/github.com/pkg/errors \ pkg:profile:v1.6.0:pkg_profile/vendor/github.com/pkg/profile \ pkg:sftp:v1.13.2:pkg_sftp/vendor/github.com/pkg/sftp \ - pkg:xattr:v0.4.3:pkg_xattr/vendor/github.com/pkg/xattr \ + pkg:xattr:v0.4.5:pkg_xattr/vendor/github.com/pkg/xattr \ protocolbuffers:protobuf-go:v1.26.0:protocolbuffers_protobuf_go/vendor/google.golang.org/protobuf \ restic:chunker:v0.4.0:restic_chunker/vendor/github.com/restic/chunker \ rs:xid:v1.2.1:rs_xid/vendor/github.com/rs/xid \ diff --git a/sysutils/restic/distinfo b/sysutils/restic/distinfo index 1c14e5914ce..db8719bfd64 100644 --- a/sysutils/restic/distinfo +++ b/sysutils/restic/distinfo @@ -1,6 +1,6 @@ -TIMESTAMP = 1628709863 -SHA256 (restic-restic-v0.12.1_GH0.tar.gz) = a9c88d5288ce04a6cc78afcda7590d3124966dab3daa9908de9b3e492e2925fb -SIZE (restic-restic-v0.12.1_GH0.tar.gz) = 23829887 +TIMESTAMP = 1648448928 +SHA256 (restic-restic-v0.13.0_GH0.tar.gz) = b3c09137b462548f44d764f98909534bef6e85fe029d4daf60545642cdefd3dd +SIZE (restic-restic-v0.13.0_GH0.tar.gz) = 23855262 SHA256 (Azure-azure-sdk-for-go-v55.6.0_GH0.tar.gz) = 8d865433adf738731485dadb630cd4eca423b275d1f17b67269a008828cfe21e SIZE (Azure-azure-sdk-for-go-v55.6.0_GH0.tar.gz) = 39148061 SHA256 (Azure-go-autorest-79a63e70c4d8_GH0.tar.gz) = d7156c20c47fcd04edae683e718a1e72dcb9178a3b07e6c18615cee5b1dd67d6 @@ -25,8 +25,8 @@ SHA256 (cespare-xxhash-v2.1.1_GH0.tar.gz) = 0ee31178d2c5a1249be4e26294a2f428008d SIZE (cespare-xxhash-v2.1.1_GH0.tar.gz) = 9292 SHA256 (cpuguy83-go-md2man-v2.0.0_GH0.tar.gz) = 
50537880d42fc28b9c2e9aaa36b137349d43cc73d46436a499f8c928cd2fc576 SIZE (cpuguy83-go-md2man-v2.0.0_GH0.tar.gz) = 52021 -SHA256 (dchest-siphash-v1.2.2_GH0.tar.gz) = 1edd183315f198d37f7c6308e31f89868d0c0692b893ee7827e497eeb9dca0b0 -SIZE (dchest-siphash-v1.2.2_GH0.tar.gz) = 10705 +SHA256 (dnaeon-go-vcr-v1.2.0_GH0.tar.gz) = 91904d173052c3f72f3258cf4e165e0dbfd23a3b5cfc735169e39e59ab9a3c9a +SIZE (dnaeon-go-vcr-v1.2.0_GH0.tar.gz) = 82208 SHA256 (dustin-go-humanize-v1.0.0_GH0.tar.gz) = e4540bd50ac855143b4f2e509313079c50cf5d8774f09cc10dbca5ae9803d8ba SIZE (dustin-go-humanize-v1.0.0_GH0.tar.gz) = 17260 SHA256 (elithrar-simple-scrypt-v1.3.0_GH0.tar.gz) = 6457e3a591c71a2b639c3c1ca2e419da6cc87b1b8d2f49c2d4ff22045eaebf53 @@ -103,12 +103,12 @@ SHA256 (klauspost-cpuid-v2.0.4_GH0.tar.gz) = 12cc5d89b732bcd884148909a10c506b1b9 SIZE (klauspost-cpuid-v2.0.4_GH0.tar.gz) = 339132 SHA256 (kr-fs-v0.1.0_GH0.tar.gz) = 5743b4ec1e7c4a336c40c9936989174dc9bfc1fb19640d7308c04892453f6c8d SIZE (kr-fs-v0.1.0_GH0.tar.gz) = 4405 -SHA256 (kurin-blazer-v0.5.3_GH0.tar.gz) = 845a28eb11c3ee746495350aa56035086280f99870b552bbb5be80db59358869 -SIZE (kurin-blazer-v0.5.3_GH0.tar.gz) = 86965 +SHA256 (kurin-blazer-ba894c124ac6_GH0.tar.gz) = c98a9991cdff2f289adf60aa2e9fcbb0f88f46f31f87157ef5054f26331c379c +SIZE (kurin-blazer-ba894c124ac6_GH0.tar.gz) = 87630 SHA256 (minio-md5-simd-v1.1.0_GH0.tar.gz) = f4565eeff59db2fa45fb5f49936381d9a5fad8bd621be33cf96ffd30077111c0 SIZE (minio-md5-simd-v1.1.0_GH0.tar.gz) = 99235 -SHA256 (minio-minio-go-v7.0.12_GH0.tar.gz) = 733169bde1a24761cd3ad32c34ef712bf5d886e3a1723495fdfc0107c5dfadeb -SIZE (minio-minio-go-v7.0.12_GH0.tar.gz) = 247797 +SHA256 (minio-minio-go-v7.0.14_GH0.tar.gz) = f10a2c6bf76c9d653315e7308a118f839ccc6e3026dfc7e7d869acb983ad429b +SIZE (minio-minio-go-v7.0.14_GH0.tar.gz) = 253788 SHA256 (minio-sha256-simd-v1.0.0_GH0.tar.gz) = f992f67a47d16983f9bab99203aaab044618f13ca1de507c33a70a53de8331e0 SIZE (minio-sha256-simd-v1.0.0_GH0.tar.gz) = 49020 SHA256 
(mitchellh-go-homedir-v1.1.0_GH0.tar.gz) = 646671c73a84a8dfb4a5a76b80c7b63549ffefa906524d45077301bc7da76600 @@ -117,16 +117,16 @@ SHA256 (modern-go-concurrent-bacd9c7ef1dd_GH0.tar.gz) = d673e902118a6ece63198dc7 SIZE (modern-go-concurrent-bacd9c7ef1dd_GH0.tar.gz) = 7526 SHA256 (modern-go-reflect2-v1.0.1_GH0.tar.gz) = d24e856d9aa8fd51b9e6c2cdd712a44c8d18cb8b72802f1bd16e0470322363fd SIZE (modern-go-reflect2-v1.0.1_GH0.tar.gz) = 14394 -SHA256 (ncw-swift-v1.0.53_GH0.tar.gz) = bf8e19e6fa5b78cfeaa5744e4be7439d45e40a3dd429a7a58397ad0e6e8d9144 -SIZE (ncw-swift-v1.0.53_GH0.tar.gz) = 65614 +SHA256 (ncw-swift-v2.0.0_GH0.tar.gz) = 2a2010487ca1d505255e6c0f07783946ab44c9ebc004e0b256be538a1fd06178 +SIZE (ncw-swift-v2.0.0_GH0.tar.gz) = 66361 SHA256 (pkg-errors-v0.9.1_GH0.tar.gz) = 56bfd893023daa498508bfe161de1be83299fcf15376035e7df79cbd7d6fa608 SIZE (pkg-errors-v0.9.1_GH0.tar.gz) = 13415 SHA256 (pkg-profile-v1.6.0_GH0.tar.gz) = cf4476de0cee00169d779bac80b20d2d532f6d12df6f87d0244883c6590b79c9 SIZE (pkg-profile-v1.6.0_GH0.tar.gz) = 5885 SHA256 (pkg-sftp-v1.13.2_GH0.tar.gz) = aa3cbe8ace3df7c136ca607fa7af3e82c4c5515e9e373c42cd9a4148cec1fa9c SIZE (pkg-sftp-v1.13.2_GH0.tar.gz) = 114805 -SHA256 (pkg-xattr-v0.4.3_GH0.tar.gz) = 49dfec6d8d3275293529a0a6fd41e6560aa933d8d13c54c44ff4d5a964c5ceba -SIZE (pkg-xattr-v0.4.3_GH0.tar.gz) = 9440 +SHA256 (pkg-xattr-v0.4.5_GH0.tar.gz) = 2ea3420cacea11f23eaa7cd14848025ec33f6563f91b99582b2fefeb8be8dc47 +SIZE (pkg-xattr-v0.4.5_GH0.tar.gz) = 9390 SHA256 (protocolbuffers-protobuf-go-v1.26.0_GH0.tar.gz) = 26218474bcf776ecf32d7d194c6bfaca8e7b4f0c087e5b595fd50fbb31409676 SIZE (protocolbuffers-protobuf-go-v1.26.0_GH0.tar.gz) = 1270215 SHA256 (restic-chunker-v0.4.0_GH0.tar.gz) = b3ed75a4ea6a05dedb0012897185bcf0f6ec76b7f030b605812623cd00c4bdca diff --git a/sysutils/rubygem-bolt/files/patch-bolt.gemspec b/sysutils/rubygem-bolt/files/patch-bolt.gemspec index 6227470cce7..7fff7d03974 100644 --- a/sysutils/rubygem-bolt/files/patch-bolt.gemspec +++ 
b/sysutils/rubygem-bolt/files/patch-bolt.gemspec @@ -6,7 +6,7 @@ s.licenses = ["Apache-2.0".freeze] - s.required_ruby_version = Gem::Requirement.new("~> 2.5".freeze) + s.required_ruby_version = Gem::Requirement.new(">= 2.5".freeze) - s.rubygems_version = "3.3.7".freeze + s.rubygems_version = "3.3.9".freeze s.summary = "Execute commands remotely over SSH and WinRM".freeze @@ -27,7 +27,7 @@ Gem::Specification.new do |s| diff --git a/sysutils/rubygem-bundler/Makefile b/sysutils/rubygem-bundler/Makefile index a9c8785df53..7da82eb816e 100644 --- a/sysutils/rubygem-bundler/Makefile +++ b/sysutils/rubygem-bundler/Makefile @@ -1,7 +1,7 @@ # Created by: Robert Gogolok PORTNAME= bundler -DISTVERSION= 2.3.8 +PORTVERSION= 2.3.9 PORTEPOCH= 1 CATEGORIES= sysutils rubygems MASTER_SITES= RG diff --git a/sysutils/rubygem-bundler/distinfo b/sysutils/rubygem-bundler/distinfo index b87ce3070fb..886bc87198c 100644 --- a/sysutils/rubygem-bundler/distinfo +++ b/sysutils/rubygem-bundler/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645976888 -SHA256 (rubygem/bundler-2.3.8.gem) = 3011c4429ec443dcf8da0561f3981b15f1a9665ed956bdcf051688ce10a8f501 -SIZE (rubygem/bundler-2.3.8.gem) = 403968 +TIMESTAMP = 1647264998 +SHA256 (rubygem/bundler-2.3.9.gem) = 55988ab920cfdec4a805750f70f9b01d1fc66d9b38ecd205f99957b474995b38 +SIZE (rubygem/bundler-2.3.9.gem) = 403968 diff --git a/sysutils/rubygem-sys-filesystem/Makefile b/sysutils/rubygem-sys-filesystem/Makefile index e887eb03558..998ec6bdf99 100644 --- a/sysutils/rubygem-sys-filesystem/Makefile +++ b/sysutils/rubygem-sys-filesystem/Makefile @@ -9,6 +9,7 @@ MAINTAINER= ruby@FreeBSD.org COMMENT= Ruby interface for getting filesystem information LICENSE= APACHE20 +LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= rubygem-ffi>=1.1<2:devel/rubygem-ffi diff --git a/sysutils/shlock/Makefile b/sysutils/shlock/Makefile index a4ebeec55f3..d01f1b7b134 100644 --- a/sysutils/shlock/Makefile +++ b/sysutils/shlock/Makefile @@ -1,5 +1,5 @@ PORTNAME= shlock -PORTVERSION= 2.6.4 
+PORTVERSION= 2.6.5 CATEGORIES= sysutils MASTER_SITES= ISC/inn DISTNAME= inn-${PORTVERSION} @@ -10,7 +10,6 @@ COMMENT= Create lock files for use in shell scripts LICENSE= ISCL LICENSE_FILE= ${WRKSRC}/LICENSE -ALL_TARGET= all-lib CONFIGURE_ARGS= --disable-shared CONFIGURE_ENV= ac_cv_path_PERL="${TRUE}" GNU_CONFIGURE= yes @@ -18,8 +17,10 @@ GNU_CONFIGURE= yes PLIST_FILES= bin/shlock \ share/man/man1/shlock.1.gz -post-build: - @${DO_MAKE_BUILD} -C ${WRKSRC}/backends shlock +do-build: + ${DO_MAKE_BUILD} -C ${WRKSRC}/include all + ${DO_MAKE_BUILD} -C ${WRKSRC}/lib all + ${DO_MAKE_BUILD} -C ${WRKSRC}/backends shlock do-install: ${INSTALL_PROGRAM} ${WRKSRC}/backends/shlock ${STAGEDIR}${PREFIX}/bin diff --git a/sysutils/shlock/distinfo b/sysutils/shlock/distinfo index d2f48e83f16..9e95d7819aa 100644 --- a/sysutils/shlock/distinfo +++ b/sysutils/shlock/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1611665337 -SHA256 (inn-2.6.4.tar.gz) = f05e803e0b3772b235bfb11b688f1def3d422dbf30ccbbce973f7fe518ac7518 -SIZE (inn-2.6.4.tar.gz) = 2610659 +TIMESTAMP = 1647264444 +SHA256 (inn-2.6.5.tar.gz) = 34236cb34486e8083111821388670e02de5588ac24e819ca4468eafbb7b67598 +SIZE (inn-2.6.5.tar.gz) = 2646269 diff --git a/sysutils/squashfs-tools/Makefile b/sysutils/squashfs-tools/Makefile index 08a953db9de..4bacc6c449e 100644 --- a/sysutils/squashfs-tools/Makefile +++ b/sysutils/squashfs-tools/Makefile @@ -1,10 +1,9 @@ # Created by: Ashish SHUKLA PORTNAME= squashfs-tools -PORTVERSION= 4.4 +PORTVERSION= 4.5.1 CATEGORIES= sysutils -MASTER_SITES= SF/squashfs/squashfs/${DISTNAME}/ -DISTNAME= squashfs${PORTVERSION} +MASTER_SITES= SF/squashfs/squashfs/squashfs${PORTVERSION} DISTFILES= ${DISTNAME}${EXTRACT_SUFX} MAINTAINER= danfe@FreeBSD.org @@ -13,10 +12,14 @@ COMMENT= Set of tools to manipulate squashfs images LICENSE= GPLv2 LICENSE_FILE= ${WRKSRC}/../COPYING -USES= alias cpe gmake +BUILD_DEPENDS= gsed:textproc/gsed -PLIST_FILES= bin/mksquashfs \ - bin/unsquashfs +USES= alias cpe gmake +BINARY_ALIAS= sed=gsed + 
+PLIST_FILES= bin/mksquashfs bin/sqfscat bin/sqfstar bin/unsquashfs \ + man/man1/mksquashfs.1.gz man/man1/sqfscat.1.gz \ + man/man1/sqfstar.1.gz man/man1/unsquashfs.1.gz OPTIONS_DEFINE= DOCS LZ4 LZMA LZO XZ ZSTD OPTIONS_DEFAULT= LZ4 LZMA LZO XZ ZSTD @@ -28,13 +31,8 @@ WRKSRC= ${WRKDIR}/${DISTNAME}/squashfs-tools CPE_PRODUCT= squashfs CPE_VENDOR= squashfs_project -USE_CSTD= gnu89 - -# Fix build with clang11 -CFLAGS+= -fcommon - -PORTDOCS= ACKNOWLEDGEMENTS CHANGES README-${PORTVERSION} USAGE \ - pseudo-file.example +PORTDOCS= ACKNOWLEDGEMENTS ACTIONS-README CHANGES DONATIONS \ + README-${PORTVERSION} USAGE pseudo-file.example LZ4_LIB_DEPENDS= liblz4.so:archivers/liblz4 LZ4_MAKE_ARGS= LZ4_SUPPORT=1 @@ -51,7 +49,15 @@ XZ_MAKE_ARGS= XZ_SUPPORT=1 ZSTD_LIB_DEPENDS= libzstd.so:archivers/zstd ZSTD_MAKE_ARGS= ZSTD_SUPPORT=1 +post-patch: + @${REINPLACE_CMD} -e '/^#include / { x; \ + s,^,#include ,; G; }' ${WRKSRC}/reader.c + @${REINPLACE_CMD} -e '/^#include "squashfs_fs\.h"/ { x; \ + s,^,typedef struct __sFILE FILE;,; G; }' \ + ${WRKSRC}/lzma_wrapper.c + post-install: + @${MV} ${WRKSRC:H}/RELEASE-READMEs/[Dp]* ${WRKSRC:H} @${MKDIR} ${STAGEDIR}${DOCSDIR} ${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC:H}/|} ${STAGEDIR}${DOCSDIR} diff --git a/sysutils/squashfs-tools/distinfo b/sysutils/squashfs-tools/distinfo index 87d86a7c33e..ab5cc89605c 100644 --- a/sysutils/squashfs-tools/distinfo +++ b/sysutils/squashfs-tools/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1567045250 -SHA256 (squashfs4.4.tar.gz) = a981b3f3f2054b5a2e658851a3c06a2460ad04a9a8a645e0afe063a63fdbb07e -SIZE (squashfs4.4.tar.gz) = 190797 +TIMESTAMP = 1647547267 +SHA256 (squashfs-tools-4.5.1.tar.gz) = 277b6e7f75a4a57f72191295ae62766a10d627a4f5e5f19eadfbc861378deea7 +SIZE (squashfs-tools-4.5.1.tar.gz) = 270112 SHA256 (lzma922.tar.bz2) = 9aade84f229fb25f7aef39d8866b375fe6d35a9e18098d7cd86a99e294902944 SIZE (lzma922.tar.bz2) = 546148 diff --git a/sysutils/squashfs-tools/files/patch-Makefile 
b/sysutils/squashfs-tools/files/patch-Makefile index 4c70e09f3e7..75b142228bc 100644 --- a/sysutils/squashfs-tools/files/patch-Makefile +++ b/sysutils/squashfs-tools/files/patch-Makefile @@ -1,6 +1,6 @@ ---- Makefile.orig 2019-08-29 01:58:04 UTC +--- Makefile.orig 2022-03-17 19:32:02 UTC +++ Makefile -@@ -97,7 +97,7 @@ COMP_DEFAULT = gzip +@@ -101,7 +101,7 @@ COMP_DEFAULT = gzip # If your C library or build/target environment doesn't support XATTRs then # comment out the next line to build Mksquashfs and Unsquashfs without XATTR # support @@ -9,7 +9,7 @@ # Select whether you wish xattrs to be stored by Mksquashfs and extracted # by Unsquashfs by default. If selected users can disable xattr support by -@@ -105,7 +105,7 @@ XATTR_SUPPORT = 1 +@@ -109,7 +109,7 @@ XATTR_SUPPORT = 1 # # If unselected, Mksquashfs/Unsquashfs won't store and extract xattrs by # default. Users can enable xattrs by using the -xattrs option. @@ -18,7 +18,7 @@ ############################################### -@@ -160,7 +160,7 @@ UNSQUASHFS_OBJS = unsquashfs.o unsquash-1.o unsquash-2 +@@ -177,7 +177,7 @@ UNSQUASHFS_OBJS = unsquashfs.o unsquash-1.o unsquash-2 CFLAGS ?= -O2 CFLAGS += $(EXTRA_CFLAGS) $(INCLUDEDIR) -D_FILE_OFFSET_BITS=64 \ @@ -27,15 +27,15 @@ -Wall LIBS = -lpthread -lm -@@ -200,6 +200,7 @@ endif +@@ -217,6 +217,7 @@ endif ifeq ($(LZO_SUPPORT),1) CFLAGS += -DLZO_SUPPORT +INCLUDEDIR += -I$(LOCALBASE)/include MKSQUASHFS_OBJS += lzo_wrapper.o UNSQUASHFS_OBJS += lzo_wrapper.o - LIBS += $(LZO_LIBDIR) -llzo2 -@@ -208,17 +209,19 @@ endif + LIBS += -llzo2 +@@ -225,17 +226,19 @@ endif ifeq ($(LZ4_SUPPORT),1) CFLAGS += -DLZ4_SUPPORT @@ -57,13 +57,19 @@ COMPRESSORS += zstd endif -@@ -360,6 +363,6 @@ clean: +@@ -417,9 +420,9 @@ clean: .PHONY: install install: mksquashfs unsquashfs - mkdir -p $(INSTALL_DIR) - cp mksquashfs $(INSTALL_DIR) - cp unsquashfs $(INSTALL_DIR) +- ln -fs unsquashfs $(INSTALL_DIR)/sqfscat +- ln -fs mksquashfs $(INSTALL_DIR)/sqfstar +- ../generate-manpages/install-manpages.sh 
$(shell pwd)/.. "$(INSTALL_MANPAGES_DIR)" + mkdir -p $(DESTDIR)$(PREFIX)/bin + ${BSD_INSTALL_PROGRAM} mksquashfs $(DESTDIR)$(PREFIX)/bin + ${BSD_INSTALL_PROGRAM} unsquashfs $(DESTDIR)$(PREFIX)/bin ++ ln -fs unsquashfs $(DESTDIR)$(INSTALL_DIR)/sqfscat ++ ln -fs mksquashfs $(DESTDIR)$(INSTALL_DIR)/sqfstar ++ ../generate-manpages/install-manpages.sh $(shell pwd)/.. "$(DESTDIR)$(INSTALL_MANPAGES_DIR)" diff --git a/sysutils/tracker-miners/Makefile b/sysutils/tracker-miners/Makefile index 90166399eff..4510db51914 100644 --- a/sysutils/tracker-miners/Makefile +++ b/sysutils/tracker-miners/Makefile @@ -2,7 +2,7 @@ PORTNAME= tracker-miners PORTVERSION= 2.3.5 -PORTREVISION= 19 +PORTREVISION= 20 CATEGORIES= sysutils gnome MASTER_SITES= GNOME diff --git a/sysutils/tracker/Makefile b/sysutils/tracker/Makefile index 192510ea042..47f95398c49 100644 --- a/sysutils/tracker/Makefile +++ b/sysutils/tracker/Makefile @@ -2,7 +2,7 @@ PORTNAME= tracker PORTVERSION= 2.3.4 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= sysutils gnome MASTER_SITES= GNOME diff --git a/sysutils/tracker3/Makefile b/sysutils/tracker3/Makefile index 1cb5c380420..097d19c255d 100644 --- a/sysutils/tracker3/Makefile +++ b/sysutils/tracker3/Makefile @@ -1,6 +1,6 @@ PORTNAME= tracker PORTVERSION= 3.1.2 -PORTREVISION= 1 +PORTREVISION= 3 CATEGORIES= sysutils gnome MASTER_SITES= GNOME PKGNAMESUFFIX= 3 @@ -26,7 +26,6 @@ USE_LDCONFIG= yes MESON_ARGS= -Dbash_completion=true \ -Dbash_completion_dir=${PREFIX}/etc/bash_completion.d \ -Ddocs=false \ - -Dman=false \ -Dstemmer=disabled \ -Dsystemd_user_services=false @@ -40,11 +39,12 @@ OPTIONS_DEFINE= DOCS MAN OPTIONS_SUB= yes DOCS_BUILD_DEPENDS= gtk-doc>0:textproc/gtk-doc DOCS_MESON_TRUE= docs -MAN_BUILD_DEPENDS= asciidoc:textproc/asciidoc \ +MAN_BUILD_DEPENDS= asciidoc:textproc/asciidoc MAN_MESON_TRUE= man +MAN_DESC= Install manual pages post-patch: - ${REINPLACE_CMD} -e 's^/etc/asciidoc^${PREFIX}/etc/asciidoc^' \ + ${REINPLACE_CMD} -e 
's|/etc/asciidoc|${PYTHONPREFIX_SITELIBDIR}/asciidoc/resources|' \ ${WRKSRC}/docs/manpages/meson.build .include diff --git a/sysutils/upower/Makefile b/sysutils/upower/Makefile index 87e262933da..fb0a4044d6f 100644 --- a/sysutils/upower/Makefile +++ b/sysutils/upower/Makefile @@ -1,9 +1,10 @@ # Created by: Joe Marcus Clarke PORTNAME= upower -PORTVERSION= 0.99.13 +PORTVERSION= 0.99.17 +DISTVERSIONPREFIX= v CATEGORIES= sysutils -MASTER_SITES= https://gitlab.freedesktop.org/upower/upower/uploads/177df5b9f9b76f25a2ad9da41aa0c1fa/ +MASTER_SITES= https://gitlab.freedesktop.org/upower/upower/-/archive/${DISTVERSIONFULL}/ MAINTAINER= desktop@FreeBSD.org COMMENT= D-Bus daemon for simplifying power management tasks @@ -13,24 +14,26 @@ LICENSE_FILE= ${WRKSRC}/COPYING LIB_DEPENDS= libdbus-glib-1.so:devel/dbus-glib -USES= compiler:c11 gettext gmake gnome libtool localbase pathfix \ - pkgconfig tar:xz +USES= compiler:c11 gettext gnome localbase meson \ + pkgconfig tar:bz2 USE_GNOME= glib20 introspection:build USE_LDCONFIG= yes -GNU_CONFIGURE= yes -CONFIGURE_ARGS= --disable-gtk-doc \ - --localstatedir=/var \ - --with-backend=freebsd \ - --without-idevice -INSTALL_TARGET= install-strip +MESON_ARGS= -Dgtk-doc=false \ + -Dos_backend=freebsd \ + -Dlocalstatedir=/var \ + -Dudevrulesdir=no \ + -Dsystemdsystemunitdir=no \ + -Didevice=disabled +LDFLAGS+= -lkvm OPTIONS_DEFINE= MANPAGES OPTIONS_DEFAULT= MANPAGES OPTIONS_SUB= yes MANPAGES_BUILD_DEPENDS= ${LOCALBASE}/share/xsl/docbook/manpages/docbook.xsl:textproc/docbook-xsl \ docbook-sgml>0:textproc/docbook-sgml \ - docbook-xml>0:textproc/docbook-xml + docbook-xml>0:textproc/docbook-xml \ + gtkdoc-scan:textproc/gtk-doc MANPAGES_USE= GNOME=libxslt:build -MANPAGES_CONFIGURE_ENABLE= man-pages +MANPAGES_MESON_TRUE= man .include diff --git a/sysutils/upower/distinfo b/sysutils/upower/distinfo index d7b7bd9e136..a974c38b5df 100644 --- a/sysutils/upower/distinfo +++ b/sysutils/upower/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1630081471 -SHA256 
(upower-0.99.13.tar.xz) = 5cad70f91540cc7dc121cb17e0ad645e5e663c8682f60a7be42ee38cd7b23d7a -SIZE (upower-0.99.13.tar.xz) = 448876 +TIMESTAMP = 1648324308 +SHA256 (upower-v0.99.17.tar.bz2) = de7177deb2ee23d9c505046c74a856564acab8fd1d39aa1541d123af5a99ca8d +SIZE (upower-v0.99.17.tar.bz2) = 139108 diff --git a/sysutils/upower/files/patch-meson.build b/sysutils/upower/files/patch-meson.build new file mode 100644 index 00000000000..9108eefe694 --- /dev/null +++ b/sysutils/upower/files/patch-meson.build @@ -0,0 +1,11 @@ +--- meson.build.orig 2022-03-09 17:46:34 UTC ++++ meson.build +@@ -105,7 +105,7 @@ endif + config_h = configure_file(output: 'config.h', configuration: cdata) + + subdir('etc') +-subdir('rules') ++#subdir('rules') + subdir('po') + subdir('dbus') + subdir('libupower-glib') diff --git a/sysutils/upower/pkg-plist b/sysutils/upower/pkg-plist index be9fb77fb23..d75c1591e6b 100644 --- a/sysutils/upower/pkg-plist +++ b/sysutils/upower/pkg-plist @@ -11,10 +11,9 @@ include/libupower-glib/up-wakeup-item.h include/libupower-glib/up-wakeups.h include/libupower-glib/upower.h lib/girepository-1.0/UPowerGlib-1.0.typelib -lib/libupower-glib.a lib/libupower-glib.so lib/libupower-glib.so.3 -lib/libupower-glib.so.3.0.1 +lib/libupower-glib.so.3.1.0 libdata/pkgconfig/upower-glib.pc libexec/upowerd %%MANPAGES%%man/man1/upower.1.gz diff --git a/sysutils/wimlib/Makefile b/sysutils/wimlib/Makefile index 234f447c26e..61cb7d8dde4 100644 --- a/sysutils/wimlib/Makefile +++ b/sysutils/wimlib/Makefile @@ -1,5 +1,6 @@ PORTNAME= wimlib PORTVERSION= 1.13.0 +PORTREVISION= 1 CATEGORIES= sysutils MASTER_SITES= https://wimlib.net/downloads/ diff --git a/sysutils/xen-tools/Makefile b/sysutils/xen-tools/Makefile index 1d2d3ec85c4..f2b030e49bb 100644 --- a/sysutils/xen-tools/Makefile +++ b/sysutils/xen-tools/Makefile @@ -1,7 +1,7 @@ PORTNAME= xen PKGNAMESUFFIX= -tools PORTVERSION= 4.15.0 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= sysutils emulators MASTER_SITES= 
http://downloads.xenproject.org/release/xen/${PORTVERSION}/ diff --git a/sysutils/xvidcap/Makefile b/sysutils/xvidcap/Makefile index 5b446465ed5..d14f6b7d0d3 100644 --- a/sysutils/xvidcap/Makefile +++ b/sysutils/xvidcap/Makefile @@ -2,7 +2,7 @@ PORTNAME= xvidcap PORTVERSION= 1.1.7 -PORTREVISION= 13 +PORTREVISION= 14 PORTEPOCH= 1 CATEGORIES= sysutils multimedia MASTER_SITES= SF diff --git a/textproc/Makefile b/textproc/Makefile index 1b871fbb708..cbaf8601fdf 100644 --- a/textproc/Makefile +++ b/textproc/Makefile @@ -1327,6 +1327,7 @@ SUBDIR += py-html2text2018 SUBDIR += py-humanfriendly SUBDIR += py-hunspell + SUBDIR += py-ini2toml SUBDIR += py-isbnlib SUBDIR += py-isbntools SUBDIR += py-iso-639 diff --git a/textproc/R-cran-XML/Makefile b/textproc/R-cran-XML/Makefile index 364650c8730..bb4c1ffaf40 100644 --- a/textproc/R-cran-XML/Makefile +++ b/textproc/R-cran-XML/Makefile @@ -2,6 +2,7 @@ PORTNAME= XML DISTVERSION= 3.99-0.9 +PORTREVISION= 1 CATEGORIES= textproc DISTNAME= ${PORTNAME}_${DISTVERSION} diff --git a/textproc/R-cran-sass/Makefile b/textproc/R-cran-sass/Makefile index 812ee74c4f0..70ef906508b 100644 --- a/textproc/R-cran-sass/Makefile +++ b/textproc/R-cran-sass/Makefile @@ -1,5 +1,5 @@ PORTNAME= sass -PORTVERSION= 0.4.0 +PORTVERSION= 0.4.1 CATEGORIES= textproc DISTNAME= ${PORTNAME}_${PORTVERSION} diff --git a/textproc/R-cran-sass/distinfo b/textproc/R-cran-sass/distinfo index 876363fc8d8..3c3b84b9a6e 100644 --- a/textproc/R-cran-sass/distinfo +++ b/textproc/R-cran-sass/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1621049648 -SHA256 (sass_0.4.0.tar.gz) = 7d06ca15239142a49e88bb3be494515abdd8c75f00f3f1b0ee7bccb55019bc2b -SIZE (sass_0.4.0.tar.gz) = 3022459 +TIMESTAMP = 1648359998 +SHA256 (sass_0.4.1.tar.gz) = 850fcb6bd49085d5afd25ac18da0744234385baf1f13d8c0a320f4da2de608bb +SIZE (sass_0.4.1.tar.gz) = 3022870 diff --git a/textproc/R-cran-xml2/Makefile b/textproc/R-cran-xml2/Makefile index 53e1ea989b5..08c2dae023b 100644 --- a/textproc/R-cran-xml2/Makefile +++ 
b/textproc/R-cran-xml2/Makefile @@ -2,6 +2,7 @@ PORTNAME= xml2 PORTVERSION= 1.3.3 +PORTREVISION= 1 CATEGORIES= textproc DISTNAME= ${PORTNAME}_${PORTVERSION} diff --git a/textproc/apertium/Makefile b/textproc/apertium/Makefile index da0cc858681..51c6b6c1de4 100644 --- a/textproc/apertium/Makefile +++ b/textproc/apertium/Makefile @@ -2,6 +2,7 @@ PORTNAME= apertium PORTVERSION= 3.8.1 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= textproc diff --git a/textproc/augeas/Makefile b/textproc/augeas/Makefile index 58cecdbd8a7..421669d3962 100644 --- a/textproc/augeas/Makefile +++ b/textproc/augeas/Makefile @@ -2,7 +2,7 @@ PORTNAME= augeas PORTVERSION= 1.12.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= textproc MASTER_SITES= http://download.augeas.net/ diff --git a/textproc/diffmark/Makefile b/textproc/diffmark/Makefile index 7c3aaf05f3c..7bec7658cfe 100644 --- a/textproc/diffmark/Makefile +++ b/textproc/diffmark/Makefile @@ -2,7 +2,7 @@ PORTNAME= diffmark PORTVERSION= 0.10 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= textproc MASTER_SITES= http://www.mangrove.cz/diffmark/ \ http://penguin.cz/~vbarta/diffmark/ diff --git a/textproc/docbook2X/Makefile b/textproc/docbook2X/Makefile index f3be65d8181..f325b1e5511 100644 --- a/textproc/docbook2X/Makefile +++ b/textproc/docbook2X/Makefile @@ -2,7 +2,7 @@ PORTNAME= docbook2X PORTVERSION= 0.8.8 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= textproc MASTER_SITES= SF diff --git a/textproc/ebook-tools/Makefile b/textproc/ebook-tools/Makefile index c67bbbd16a1..964ca4ceb7d 100644 --- a/textproc/ebook-tools/Makefile +++ b/textproc/ebook-tools/Makefile @@ -2,7 +2,7 @@ PORTNAME= ebook-tools PORTVERSION= 0.2.2 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= textproc kde MASTER_SITES= SF diff --git a/textproc/fpc-libxml2/Makefile b/textproc/fpc-libxml2/Makefile index d06bc652019..501258bd38d 100644 --- a/textproc/fpc-libxml2/Makefile +++ b/textproc/fpc-libxml2/Makefile @@ -1,5 +1,6 @@ # Created by: Christoper Key +PORTREVISION= 1 
CATEGORIES= textproc lang PKGNAMESUFFIX= -libxml2 diff --git a/textproc/gdome2/Makefile b/textproc/gdome2/Makefile index 3618ea69127..703848c27bf 100644 --- a/textproc/gdome2/Makefile +++ b/textproc/gdome2/Makefile @@ -2,7 +2,7 @@ PORTNAME= gdome2 PORTVERSION= 0.8.1 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= textproc MASTER_SITES= http://gdome2.cs.unibo.it/tarball/ diff --git a/textproc/gmetadom/Makefile b/textproc/gmetadom/Makefile index d441060aa97..4ef5fc1e809 100644 --- a/textproc/gmetadom/Makefile +++ b/textproc/gmetadom/Makefile @@ -2,7 +2,7 @@ PORTNAME= gmetadom PORTVERSION= 0.2.6 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= textproc MASTER_SITES= SF diff --git a/textproc/gspell/Makefile b/textproc/gspell/Makefile index 580c301d4eb..b4622596de6 100644 --- a/textproc/gspell/Makefile +++ b/textproc/gspell/Makefile @@ -2,7 +2,7 @@ PORTNAME= gspell PORTVERSION= 1.9.1 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= textproc MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/textproc/lasem/Makefile b/textproc/lasem/Makefile index b7c0b460e95..94232a771d2 100644 --- a/textproc/lasem/Makefile +++ b/textproc/lasem/Makefile @@ -2,6 +2,7 @@ PORTNAME= lasem PORTVERSION= 0.5.1 +PORTREVISION= 1 CATEGORIES= textproc gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/textproc/libabw/Makefile b/textproc/libabw/Makefile index f0f42ddaeb2..d932847a2ac 100644 --- a/textproc/libabw/Makefile +++ b/textproc/libabw/Makefile @@ -1,5 +1,6 @@ PORTNAME= libabw PORTVERSION= 0.1.3 +PORTREVISION= 1 CATEGORIES= textproc MASTER_SITES= LODEV/${PORTNAME} diff --git a/textproc/libcroco/Makefile b/textproc/libcroco/Makefile index 782e03e4e53..21d0195b764 100644 --- a/textproc/libcroco/Makefile +++ b/textproc/libcroco/Makefile @@ -2,6 +2,7 @@ PORTNAME= libcroco PORTVERSION= 0.6.13 +PORTREVISION= 1 CATEGORIES= textproc gnome MASTER_SITES= GNOME diff --git a/textproc/libe-book/Makefile b/textproc/libe-book/Makefile index 718a9db812a..3568ff59cac 100644 --- 
a/textproc/libe-book/Makefile +++ b/textproc/libe-book/Makefile @@ -1,6 +1,6 @@ PORTNAME= libe-book PORTVERSION= 0.1.3 -PORTREVISION= 21 +PORTREVISION= 22 CATEGORIES= textproc MASTER_SITES= SF/libebook/${PORTNAME}-${PORTVERSION}/ diff --git a/textproc/libextractor/Makefile b/textproc/libextractor/Makefile index cca75227229..2d20f25fc7e 100644 --- a/textproc/libextractor/Makefile +++ b/textproc/libextractor/Makefile @@ -2,6 +2,7 @@ PORTNAME= libextractor PORTVERSION= 1.11 +PORTREVISION= 1 CATEGORIES= textproc MASTER_SITES= GNU diff --git a/textproc/libfo/Makefile b/textproc/libfo/Makefile index eb9f2ed87af..0457f9810ad 100644 --- a/textproc/libfo/Makefile +++ b/textproc/libfo/Makefile @@ -1,7 +1,7 @@ PORTNAME= libfo PORTVERSION= 0.6.3 DISTVERSIONPREFIX= v -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= textproc MAINTAINER= hrs@FreeBSD.org diff --git a/textproc/libfolia/Makefile b/textproc/libfolia/Makefile index dffd1127ae3..f00db98fa69 100644 --- a/textproc/libfolia/Makefile +++ b/textproc/libfolia/Makefile @@ -1,6 +1,7 @@ PORTNAME= libfolia DISTVERSIONPREFIX= v DISTVERSION= 2.10 +PORTREVISION= 1 CATEGORIES= textproc MAINTAINER= yuri@FreeBSD.org diff --git a/textproc/libgepub/Makefile b/textproc/libgepub/Makefile index 326c73f2701..6a62b1b8545 100644 --- a/textproc/libgepub/Makefile +++ b/textproc/libgepub/Makefile @@ -1,6 +1,6 @@ PORTNAME= libgepub PORTVERSION= 0.6.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= textproc gnome MASTER_SITES= GNOME diff --git a/textproc/liblingoteach/Makefile b/textproc/liblingoteach/Makefile index 3d6be66fcaa..3f9a2381e43 100644 --- a/textproc/liblingoteach/Makefile +++ b/textproc/liblingoteach/Makefile @@ -1,6 +1,6 @@ PORTNAME= liblingoteach PORTVERSION= 0.2.1 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= textproc MASTER_SITES= SF/lingoteach/OldFiles diff --git a/textproc/libodfgen01/Makefile b/textproc/libodfgen01/Makefile index 0f0374b150e..37b42b30edc 100644 --- a/textproc/libodfgen01/Makefile +++ b/textproc/libodfgen01/Makefile 
@@ -1,5 +1,6 @@ PORTNAME= libodfgen PORTVERSION= 0.1.8 +PORTREVISION= 1 CATEGORIES= textproc MASTER_SITES= SF/libwpd/${PORTNAME}/${PORTNAME}-${PORTVERSION} PKGNAMESUFFIX= 01 diff --git a/textproc/libtranslate/Makefile b/textproc/libtranslate/Makefile index 7cf4e1e9075..28ef846e5fc 100644 --- a/textproc/libtranslate/Makefile +++ b/textproc/libtranslate/Makefile @@ -2,7 +2,7 @@ PORTNAME= libtranslate PORTVERSION= 0.99 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= textproc MASTER_SITES= SAVANNAH DISTFILES= ${DISTNAME}${EXTRACT_SUFX} # May append to this diff --git a/textproc/libvisio01/Makefile b/textproc/libvisio01/Makefile index a68edc8bd7c..8829a93c67c 100644 --- a/textproc/libvisio01/Makefile +++ b/textproc/libvisio01/Makefile @@ -1,6 +1,6 @@ PORTNAME= libvisio PORTVERSION= 0.1.7 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= textproc devel MASTER_SITES= LODEV/${PORTNAME} PKGNAMESUFFIX= 01 diff --git a/textproc/libwpd010/Makefile b/textproc/libwpd010/Makefile index 48a4cca278b..278afd90fba 100644 --- a/textproc/libwpd010/Makefile +++ b/textproc/libwpd010/Makefile @@ -2,7 +2,7 @@ PORTNAME= libwpd PORTVERSION= 0.10.3 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= textproc MASTER_SITES= SF/${PORTNAME}/${PORTNAME}/${PORTNAME}-${PORTVERSION} PKGNAMESUFFIX= 010 diff --git a/textproc/libxml++26/Makefile b/textproc/libxml++26/Makefile index 583597721b9..4a2b1604178 100644 --- a/textproc/libxml++26/Makefile +++ b/textproc/libxml++26/Makefile @@ -3,7 +3,7 @@ PORTNAME= libxml++ PORTVERSION= 2.40.1 -PORTREVISION?= 0 +PORTREVISION?= 1 PORTEPOCH= 1 CATEGORIES= textproc MASTER_SITES= GNOME diff --git a/textproc/lttoolbox/Makefile b/textproc/lttoolbox/Makefile index 859a0e42156..f020774ce0b 100644 --- a/textproc/lttoolbox/Makefile +++ b/textproc/lttoolbox/Makefile @@ -2,6 +2,7 @@ PORTNAME= lttoolbox PORTVERSION= ${APVER}.${APREV} +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= textproc diff --git a/textproc/p5-JSON-Validator/Makefile b/textproc/p5-JSON-Validator/Makefile index 
e05f6a01830..b05532833fd 100644 --- a/textproc/p5-JSON-Validator/Makefile +++ b/textproc/p5-JSON-Validator/Makefile @@ -1,7 +1,7 @@ # Created by: Adam Weinberger PORTNAME= JSON-Validator -PORTVERSION= 5.05 +PORTVERSION= 5.08 CATEGORIES= textproc perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/textproc/p5-JSON-Validator/distinfo b/textproc/p5-JSON-Validator/distinfo index 9343d32a57e..c099d2564ff 100644 --- a/textproc/p5-JSON-Validator/distinfo +++ b/textproc/p5-JSON-Validator/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641530712 -SHA256 (JSON-Validator-5.05.tar.gz) = cc9cd0c415295104f53ef7ace8422801b2a4274d537521e8dea6e664e5b2b4cd -SIZE (JSON-Validator-5.05.tar.gz) = 110937 +TIMESTAMP = 1648272235 +SHA256 (JSON-Validator-5.08.tar.gz) = 40f6968ed71fc6fd488fa43522dca1939343854092b61fde6601f07166477858 +SIZE (JSON-Validator-5.08.tar.gz) = 112266 diff --git a/textproc/p5-Lingua-Preferred/Makefile b/textproc/p5-Lingua-Preferred/Makefile index 4961e3ec9be..ee77f14675f 100644 --- a/textproc/p5-Lingua-Preferred/Makefile +++ b/textproc/p5-Lingua-Preferred/Makefile @@ -17,4 +17,6 @@ RUN_DEPENDS= p5-Log-TraceMessages>=0:devel/p5-Log-TraceMessages USES= perl5 USE_PERL5= configure +NO_ARCH= yes + .include diff --git a/textproc/p5-XML-CanonicalizeXML/Makefile b/textproc/p5-XML-CanonicalizeXML/Makefile index 11bf73576f3..bb70cc9fcbe 100644 --- a/textproc/p5-XML-CanonicalizeXML/Makefile +++ b/textproc/p5-XML-CanonicalizeXML/Makefile @@ -2,6 +2,7 @@ PORTNAME= XML-CanonicalizeXML PORTVERSION= 0.10 +PORTREVISION= 1 CATEGORIES= textproc perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/textproc/p5-XML-LibXSLT/Makefile b/textproc/p5-XML-LibXSLT/Makefile index 3ec71394b4d..cfe3fb8e059 100644 --- a/textproc/p5-XML-LibXSLT/Makefile +++ b/textproc/p5-XML-LibXSLT/Makefile @@ -2,6 +2,7 @@ PORTNAME= XML-LibXSLT PORTVERSION= 1.99 +PORTREVISION= 1 CATEGORIES= textproc perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/textproc/p5-XML-Liberal/Makefile 
b/textproc/p5-XML-Liberal/Makefile index e0fd1ebf04e..fb402e5dc83 100644 --- a/textproc/p5-XML-Liberal/Makefile +++ b/textproc/p5-XML-Liberal/Makefile @@ -1,8 +1,7 @@ # Created by: Gea-Suan Lin PORTNAME= XML-Liberal -PORTVERSION= 0.30 -PORTREVISION= 1 +PORTVERSION= 0.32 CATEGORIES= textproc perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/textproc/p5-XML-Liberal/distinfo b/textproc/p5-XML-Liberal/distinfo index 1d2f6fabb28..52583846f15 100644 --- a/textproc/p5-XML-Liberal/distinfo +++ b/textproc/p5-XML-Liberal/distinfo @@ -1,2 +1,3 @@ -SHA256 (XML-Liberal-0.30.tar.gz) = 3cea179da95e598d042866d522ef48306b468b7d02fd91860e2115c9fce3aed9 -SIZE (XML-Liberal-0.30.tar.gz) = 130082 +TIMESTAMP = 1648189885 +SHA256 (XML-Liberal-0.32.tar.gz) = a9e81db3d7cc4790cac8ce61dce59001b3dd9e8accf554f0309c8fdde9a52b99 +SIZE (XML-Liberal-0.32.tar.gz) = 96736 diff --git a/textproc/php74-xmlreader/Makefile b/textproc/php74-xmlreader/Makefile index 9a52d8b7a27..b4e2c285be8 100644 --- a/textproc/php74-xmlreader/Makefile +++ b/textproc/php74-xmlreader/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php74 diff --git a/textproc/php74-xsl/Makefile b/textproc/php74-xsl/Makefile index d86ab265bfc..20071ed9ef0 100644 --- a/textproc/php74-xsl/Makefile +++ b/textproc/php74-xsl/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php74 diff --git a/textproc/php80-dom/Makefile b/textproc/php80-dom/Makefile index b91d1ceaee6..2158f7dc83c 100644 --- a/textproc/php80-dom/Makefile +++ b/textproc/php80-dom/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php80 diff --git a/textproc/php80-simplexml/Makefile b/textproc/php80-simplexml/Makefile index c8bbfd273d8..2bcd86b21c6 100644 --- a/textproc/php80-simplexml/Makefile +++ b/textproc/php80-simplexml/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php80 diff --git 
a/textproc/php80-xml/Makefile b/textproc/php80-xml/Makefile index 343ee8b24d5..1cd222bd062 100644 --- a/textproc/php80-xml/Makefile +++ b/textproc/php80-xml/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php80 diff --git a/textproc/php80-xmlreader/Makefile b/textproc/php80-xmlreader/Makefile index 870b8cc7314..7996c3ade59 100644 --- a/textproc/php80-xmlreader/Makefile +++ b/textproc/php80-xmlreader/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php80 diff --git a/textproc/php80-xmlwriter/Makefile b/textproc/php80-xmlwriter/Makefile index c98b7578e0b..8c08a4766eb 100644 --- a/textproc/php80-xmlwriter/Makefile +++ b/textproc/php80-xmlwriter/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php80 diff --git a/textproc/php80-xsl/Makefile b/textproc/php80-xsl/Makefile index 5b29c0a1c9a..0b41a3e200b 100644 --- a/textproc/php80-xsl/Makefile +++ b/textproc/php80-xsl/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php80 diff --git a/textproc/php81-dom/Makefile b/textproc/php81-dom/Makefile index d6610e9682c..18ce3512cdb 100644 --- a/textproc/php81-dom/Makefile +++ b/textproc/php81-dom/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php81 diff --git a/textproc/php81-simplexml/Makefile b/textproc/php81-simplexml/Makefile index 4771fd3f274..171dd964ccb 100644 --- a/textproc/php81-simplexml/Makefile +++ b/textproc/php81-simplexml/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php81 diff --git a/textproc/php81-xml/Makefile b/textproc/php81-xml/Makefile index 6baacc4611c..0800edeb41e 100644 --- a/textproc/php81-xml/Makefile +++ b/textproc/php81-xml/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php81 diff --git a/textproc/php81-xmlreader/Makefile 
b/textproc/php81-xmlreader/Makefile index 2a5922ee682..414e7cc8b14 100644 --- a/textproc/php81-xmlreader/Makefile +++ b/textproc/php81-xmlreader/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php81 diff --git a/textproc/php81-xmlwriter/Makefile b/textproc/php81-xmlwriter/Makefile index e0cadc6a76b..9d8addc95c9 100644 --- a/textproc/php81-xmlwriter/Makefile +++ b/textproc/php81-xmlwriter/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php81 diff --git a/textproc/php81-xsl/Makefile b/textproc/php81-xsl/Makefile index e15435fa304..fe4f8282559 100644 --- a/textproc/php81-xsl/Makefile +++ b/textproc/php81-xsl/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= textproc MASTERDIR= ${.CURDIR}/../../lang/php81 diff --git a/textproc/py-cmarkgfm/Makefile b/textproc/py-cmarkgfm/Makefile index 1f4daa6110a..cd2bb5df77e 100644 --- a/textproc/py-cmarkgfm/Makefile +++ b/textproc/py-cmarkgfm/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= cmarkgfm -PORTVERSION= 0.7.0 +PORTVERSION= 0.8.0 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/textproc/py-cmarkgfm/distinfo b/textproc/py-cmarkgfm/distinfo index 138bfccf048..638fc44ec95 100644 --- a/textproc/py-cmarkgfm/distinfo +++ b/textproc/py-cmarkgfm/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641046004 -SHA256 (cmarkgfm-0.7.0.tar.gz) = a65c152f9da8bf82714f19c287fe0968ee8e7f899e8d94503d73d0fe57229c19 -SIZE (cmarkgfm-0.7.0.tar.gz) = 140573 +TIMESTAMP = 1647264674 +SHA256 (cmarkgfm-0.8.0.tar.gz) = fe859b0e98d222b4b458022007daac2757e970f0671d45e55d01931c1b4bb3cb +SIZE (cmarkgfm-0.8.0.tar.gz) = 140887 diff --git a/textproc/py-cssselect2/Makefile b/textproc/py-cssselect2/Makefile index 5e8bb7841cf..247d07fdd8d 100644 --- a/textproc/py-cssselect2/Makefile +++ b/textproc/py-cssselect2/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= cssselect2 -PORTVERSION= 0.4.1 
+PORTVERSION= 0.5.0 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -15,9 +15,12 @@ LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}tinycss2>=0:textproc/py-tinycss2@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}webencodings>=0:converters/py-webencodings@${PY_FLAVOR} -USES= python:3.6+ +USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +post-patch: + @${SED} -e 's|%%PORTVERSION%%|${PORTVERSION}|' ${FILESDIR}/setup.py > ${WRKSRC}/setup.py + .include diff --git a/textproc/py-cssselect2/distinfo b/textproc/py-cssselect2/distinfo index 9a50b8e9ee6..0a873d21857 100644 --- a/textproc/py-cssselect2/distinfo +++ b/textproc/py-cssselect2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1604680192 -SHA256 (cssselect2-0.4.1.tar.gz) = 93fbb9af860e95dd40bf18c3b2b6ed99189a07c0f29ba76f9c5be71344664ec8 -SIZE (cssselect2-0.4.1.tar.gz) = 32756 +TIMESTAMP = 1647264676 +SHA256 (cssselect2-0.5.0.tar.gz) = d98a7bbdd8ebc46093279195d669a3359bd5a23f90c19e82c19d9eeef333e617 +SIZE (cssselect2-0.5.0.tar.gz) = 34918 diff --git a/textproc/py-cssselect2/files/setup.py b/textproc/py-cssselect2/files/setup.py new file mode 100644 index 00000000000..9cbdcc2f99f --- /dev/null +++ b/textproc/py-cssselect2/files/setup.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# setup.py generated by flit for tools that don't yet use PEP 517 + +from distutils.core import setup + +packages = \ +['cssselect2'] + +package_data = \ +{'': ['*']} + +install_requires = \ +['tinycss2', 'webencodings'] + +extras_require = \ +{'doc': ['sphinx', 'sphinx_rtd_theme'], + 'test': ['pytest', + 'pytest-cov', + 'pytest-flake8', + 'pytest-isort', + 'coverage[toml]']} + +setup(name='cssselect2', + version='%%PORTVERSION%%', + description='CSS selectors for Python ElementTree', + author=None, + author_email='Simon Sapin ', + url=None, + packages=packages, + package_data=package_data, + install_requires=install_requires, + extras_require=extras_require, + 
python_requires='>=3.7', + ) diff --git a/textproc/py-elasticsearch/Makefile b/textproc/py-elasticsearch/Makefile index 606ba3febd7..16a4b9268b2 100644 --- a/textproc/py-elasticsearch/Makefile +++ b/textproc/py-elasticsearch/Makefile @@ -1,7 +1,7 @@ # Created by: Ryan Steinmetz PORTNAME= elasticsearch -PORTVERSION= 7.17.0 +PORTVERSION= 7.17.1 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/textproc/py-elasticsearch/distinfo b/textproc/py-elasticsearch/distinfo index bde21b94c00..4dad18be92b 100644 --- a/textproc/py-elasticsearch/distinfo +++ b/textproc/py-elasticsearch/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058132 -SHA256 (elasticsearch-7.17.0.tar.gz) = 406013783cc36af28ea36cd58cf184cd646530e47eea41336b386322fb878b59 -SIZE (elasticsearch-7.17.0.tar.gz) = 241536 +TIMESTAMP = 1647264678 +SHA256 (elasticsearch-7.17.1.tar.gz) = 0061c49c3d4202802c09fba6734e48699a47cfeb737747acecc932ea73b7d318 +SIZE (elasticsearch-7.17.1.tar.gz) = 247395 diff --git a/textproc/py-ini2toml/Makefile b/textproc/py-ini2toml/Makefile new file mode 100644 index 00000000000..cf923dba212 --- /dev/null +++ b/textproc/py-ini2toml/Makefile @@ -0,0 +1,22 @@ +# Created by: Po-Chuan Hsieh + +PORTNAME= ini2toml +PORTVERSION= 0.10 +CATEGORIES= textproc python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} + +MAINTAINER= sunpoet@FreeBSD.org +COMMENT= Automatically conversion of .ini/.cfg files to TOML equivalents + +LICENSE= MPL20 +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}packaging>=20.7:devel/py-packaging@${PY_FLAVOR} + +USES= python:3.7+ +USE_PYTHON= autoplist concurrent distutils + +NO_ARCH= yes + +.include diff --git a/textproc/py-ini2toml/distinfo b/textproc/py-ini2toml/distinfo new file mode 100644 index 00000000000..d4cffdca416 --- /dev/null +++ b/textproc/py-ini2toml/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1648219977 +SHA256 (ini2toml-0.10.tar.gz) = 
b34983939b396b5ee01e5591c1b7c1ee62d263178ae95e04a99ab7c3c82918f6 +SIZE (ini2toml-0.10.tar.gz) = 99836 diff --git a/textproc/py-ini2toml/pkg-descr b/textproc/py-ini2toml/pkg-descr new file mode 100644 index 00000000000..e8d6b882c18 --- /dev/null +++ b/textproc/py-ini2toml/pkg-descr @@ -0,0 +1,10 @@ +The original purpose of this project is to help migrating setup.cfg files to PEP +621, but by extension it can also be used to convert any compatible .ini/.cfg +file to TOML. + +Please notice, the provided .ini/.cfg files should follow the same syntax +supported by Python's ConfigParser library (here referred to as INI syntax) and +more specifically abide by ConfigUpdater restrictions (e.g., no interpolation or +repeated fields). + +WWW: https://github.com/abravalheri/ini2toml diff --git a/textproc/py-isbnlib/Makefile b/textproc/py-isbnlib/Makefile index a3f483159c6..79f87dfb911 100644 --- a/textproc/py-isbnlib/Makefile +++ b/textproc/py-isbnlib/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= isbnlib -PORTVERSION= 3.10.9 +PORTVERSION= 3.10.10 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/textproc/py-isbnlib/distinfo b/textproc/py-isbnlib/distinfo index 309368859eb..cf67aa78cf2 100644 --- a/textproc/py-isbnlib/distinfo +++ b/textproc/py-isbnlib/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1636117292 -SHA256 (isbnlib-3.10.9.tar.gz) = 3f8187eb8eb6fa027d26ff0775f2abd420e29ce332794bf552a86a6bdba470b8 -SIZE (isbnlib-3.10.9.tar.gz) = 62641 +TIMESTAMP = 1647264680 +SHA256 (isbnlib-3.10.10.tar.gz) = c9e6c1dcaa9dff195429373cf2beb3117f30b3fca43d7db5aec5a2d1f6f59784 +SIZE (isbnlib-3.10.10.tar.gz) = 62764 diff --git a/textproc/py-markdown/Makefile b/textproc/py-markdown/Makefile index 110d4958dd6..57187819794 100644 --- a/textproc/py-markdown/Makefile +++ b/textproc/py-markdown/Makefile @@ -1,7 +1,7 @@ # Created by: Graham Todd PORTNAME= markdown -PORTVERSION= 3.3.4 +PORTVERSION= 3.3.6 CATEGORIES= textproc python 
MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,19 +13,21 @@ COMMENT= Python implementation of Markdown LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE.md -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}importlib-metadata>0:devel/py-importlib-metadata@${PY_FLAVOR} -TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}nose>0:devel/py-nose@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}coverage>0:devel/py-coverage@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pytidylib>0:textproc/py-pytidylib@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pygments>0:textproc/py-pygments@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}yaml>0:devel/py-yaml@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}coverage>=0:devel/py-coverage@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}yaml>=0:devel/py-yaml@${PY_FLAVOR} USES= python:3.6+ -USE_PYTHON= distutils concurrent autoplist +USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes -do-test: - @cd ${WRKSRC} && ${PYTHON_CMD} ${PYDISTUTILS_SETUP} test +.include -.include +.if ${PYTHON_REL} < 31000 +RUN_DEPENDS+= ${PYTHON_PKGNAMEPREFIX}importlib-metadata>=4.4:devel/py-importlib-metadata@${PY_FLAVOR} +.endif + +do-test: + cd ${WRKSRC} && ${PYTHON_CMD} -m unittest -v + +.include diff --git a/textproc/py-markdown/distinfo b/textproc/py-markdown/distinfo index 149882e34f0..74bd55272ce 100644 --- a/textproc/py-markdown/distinfo +++ b/textproc/py-markdown/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1618638255 -SHA256 (Markdown-3.3.4.tar.gz) = 31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49 -SIZE (Markdown-3.3.4.tar.gz) = 322192 +TIMESTAMP = 1647264682 +SHA256 (Markdown-3.3.6.tar.gz) = 76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006 +SIZE (Markdown-3.3.6.tar.gz) = 323584 diff --git a/textproc/py-markdown/files/patch-setup.py b/textproc/py-markdown/files/patch-setup.py deleted file mode 100644 index 49128bf8891..00000000000 --- a/textproc/py-markdown/files/patch-setup.py +++ /dev/null @@ -1,10 +0,0 @@ ---- setup.py.orig 2021-02-24 19:57:42 UTC -+++ 
setup.py -@@ -129,5 +129,6 @@ setup( - 'Topic :: Text Processing :: Filters', - 'Topic :: Text Processing :: Markup :: HTML', - 'Topic :: Text Processing :: Markup :: Markdown' -- ] -+ ], -+ test_suite='nose.collector', - ) diff --git a/textproc/py-markdown/pkg-descr b/textproc/py-markdown/pkg-descr index 51d400b926c..2bb51847884 100644 --- a/textproc/py-markdown/pkg-descr +++ b/textproc/py-markdown/pkg-descr @@ -1,13 +1,18 @@ -This is a Python implementation of John Gruber's Markdown. It is almost -completely compliant with the reference implementation, though there are a -few very minor differences. +Python-Markdown is a Python implementation of John Gruber's Markdown. It is +almost completely compliant with the reference implementation, though there are +a few known issues. -Features: +In addition to the basic markdown syntax, Python-Markdown supports the following +features: +- International Input: Python-Markdown will accept input in any language + supported by Unicode including bi-directional text. In fact the test suite + includes documents written in Russian and Arabic. +- Extensions: Various extensions are provided (including extra) to change and/or + extend the base syntax. Additionally, a public Extension API is available to + write your own extensions. +- Output Formats: Python-Markdown can output documents with either HTML or XHTML + style tags. See the Library Reference for details. +- Command Line Interface: In addition to being a Python Library, a command line + script is available for your convenience. 
- * International Input - * Extensions - * Output Formats - * "Safe Modes" - * Command Line Interface - -WWW: https://pythonhosted.org/Markdown/ +WWW: https://github.com/Python-Markdown/markdown diff --git a/textproc/py-mkdocs-material/Makefile b/textproc/py-mkdocs-material/Makefile index e5f4a91114d..540c482dd16 100644 --- a/textproc/py-mkdocs-material/Makefile +++ b/textproc/py-mkdocs-material/Makefile @@ -1,5 +1,5 @@ PORTNAME= mkdocs-material -DISTVERSION= 8.2.5 +DISTVERSION= 8.2.7 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -10,7 +10,7 @@ COMMENT= Material Design theme for MkDocs LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}Jinja2>=2.11.1:devel/py-Jinja2@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}Jinja2>=2.11.1<3.1:devel/py-Jinja2@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}markdown>=3.2:textproc/py-markdown@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}mkdocs>=1.2.3:textproc/py-mkdocs@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}mkdocs-material-extensions>=1.0:textproc/py-mkdocs-material-extensions@${PY_FLAVOR} \ diff --git a/textproc/py-mkdocs-material/distinfo b/textproc/py-mkdocs-material/distinfo index a37f36fc03f..a9aa578cdfe 100644 --- a/textproc/py-mkdocs-material/distinfo +++ b/textproc/py-mkdocs-material/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646669684 -SHA256 (mkdocs-material-8.2.5.tar.gz) = cdfb39edc77f5c2bbfca02b4fc3764de38b2093eff121aed37674204439901ce -SIZE (mkdocs-material-8.2.5.tar.gz) = 1971829 +TIMESTAMP = 1648231302 +SHA256 (mkdocs-material-8.2.7.tar.gz) = 3314d94ccc11481b1a3aa4f7babb4fb2bc47daa2fa8ace2463665952116f409b +SIZE (mkdocs-material-8.2.7.tar.gz) = 2044170 diff --git a/textproc/py-pybtex/Makefile b/textproc/py-pybtex/Makefile index 9629b39dd3d..326e3c2f356 100644 --- a/textproc/py-pybtex/Makefile +++ b/textproc/py-pybtex/Makefile @@ -2,6 +2,7 @@ PORTNAME= pybtex PORTVERSION= 0.24.0 +PORTREVISION= 1 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP 
PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/textproc/py-pybtex/files/patch-setup.py b/textproc/py-pybtex/files/patch-setup.py new file mode 100644 index 00000000000..83eeab85826 --- /dev/null +++ b/textproc/py-pybtex/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2021-01-17 19:26:34 UTC ++++ setup.py +@@ -92,7 +92,7 @@ setup( + install_requires=install_requires, + extras_require=extras_require, + python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*', +- packages=find_packages(exclude=['docs']), ++ packages=find_packages(exclude=['docs', 'tests*']), + include_package_data=True, + cmdclass={'sdist': Sdist}, + entry_points={ diff --git a/textproc/py-python-lsp-server/Makefile b/textproc/py-python-lsp-server/Makefile index dcd0f30e822..34b3e88e489 100644 --- a/textproc/py-python-lsp-server/Makefile +++ b/textproc/py-python-lsp-server/Makefile @@ -1,7 +1,7 @@ # Created by: Rainer Hurling PORTNAME= python-lsp-server -PORTVERSION= 1.4.0 +PORTVERSION= 1.4.1 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/textproc/py-python-lsp-server/distinfo b/textproc/py-python-lsp-server/distinfo index 1d8945b99a3..c905a061e63 100644 --- a/textproc/py-python-lsp-server/distinfo +++ b/textproc/py-python-lsp-server/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647110421 -SHA256 (python-lsp-server-1.4.0.tar.gz) = 769142c07573f6b66e930cbd7c588b826082550bef6267bb0aec63e7b6260009 -SIZE (python-lsp-server-1.4.0.tar.gz) = 75443 +TIMESTAMP = 1648396933 +SHA256 (python-lsp-server-1.4.1.tar.gz) = be7f83298af9f0951a93972cafc9db04fd7cf5c05f20812515275f0ba70e342f +SIZE (python-lsp-server-1.4.1.tar.gz) = 76416 diff --git a/textproc/py-reverend/files/patch-2to3 b/textproc/py-reverend/files/patch-2to3 new file mode 100644 index 00000000000..6015f1f691c --- /dev/null +++ b/textproc/py-reverend/files/patch-2to3 @@ -0,0 +1,93 @@ +--- reverend/ui/trainer.py.orig 2004-12-01 17:49:04 UTC ++++ reverend/ui/trainer.py +@@ -4,15 +4,15 @@ + # License as 
published by the Free Software Foundation. + # + +-from Tkinter import * +-import tkFileDialog +-import tkSimpleDialog +-import tkMessageBox ++from tkinter import * ++import tkinter.filedialog ++import tkinter.simpledialog ++import tkinter.messagebox + + import os + +-from util import Command, StatusBar, Notebook +-from tester import TestView ++from .util import Command, StatusBar, Notebook ++from .tester import TestView + + class PoolView(Frame): + def __init__(self, master=None, guesser=None, app=None): +@@ -79,20 +79,20 @@ class PoolView(Frame): + self.model[name]=(pool, uTok, tTok, train) + + def refresh(self): +- for pool, ut, tt, train in self.model.values(): ++ for pool, ut, tt, train in list(self.model.values()): + ut.set(len(pool)) + tt.set(pool.tokenCount) + train.set(pool.trainCount) + + def save(self): +- path = tkFileDialog.asksaveasfilename() ++ path = tkinter.filedialog.asksaveasfilename() + if not path: + return + self.guesser.save(path) + self.app.dirty = False + + def load(self): +- path = tkFileDialog.askopenfilename() ++ path = tkinter.filedialog.askopenfilename() + if not path: + return + self.guesser.load(path) +@@ -100,11 +100,11 @@ class PoolView(Frame): + self.app.dirty = False + + def newPool(self): +- p = tkSimpleDialog.askstring('Create Pool', 'Name for new pool?') ++ p = tkinter.simpledialog.askstring('Create Pool', 'Name for new pool?') + if not p: + return + if p in self.guesser.pools: +- tkMessageBox.showwarning('Bad pool name!', 'Pool %s already exists.' % p) ++ tkinter.messagebox.showwarning('Bad pool name!', 'Pool %s already exists.' 
% p) + self.guesser.newPool(p) + self.reload() + self.app.poolAdded() +@@ -193,7 +193,7 @@ class Trainer(Frame): + + + def loadCorpus(self): +- path = tkFileDialog.askdirectory() ++ path = tkinter.filedialog.askdirectory() + if not path: + return + self.loadFileList(path) +@@ -203,10 +203,10 @@ class Trainer(Frame): + def bulkTest(self): + dirs = [] + for pool in self.guesser.poolNames(): +- path = tkFileDialog.askdirectory() ++ path = tkinter.filedialog.askdirectory() + dirs.append((pool, path)) + for pool, path in dirs: +- print pool, path ++ print(pool, path) + + + def displayList(self): +@@ -253,7 +253,7 @@ class Trainer(Frame): + + def quitNow(self): + if self.dirty: +- if tkMessageBox.askyesno("You have unsaved changes!", "Quit without saving?"): ++ if tkinter.messagebox.askyesno("You have unsaved changes!", "Quit without saving?"): + self.quit() + self.quit() + diff --git a/textproc/py-sphinx-tabs/Makefile b/textproc/py-sphinx-tabs/Makefile index 68a6c350bdf..13d83b5cd43 100644 --- a/textproc/py-sphinx-tabs/Makefile +++ b/textproc/py-sphinx-tabs/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= sphinx-tabs -PORTVERSION= 3.2.0 +PORTVERSION= 3.3.0 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,7 +12,7 @@ COMMENT= Tabbed views for Sphinx LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}docutils>=0.16,1<1,1:textproc/py-docutils@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}docutils>=0.17.0,1<0.18,1:textproc/py-docutils@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pygments>=0:textproc/py-pygments@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}sphinx>=2,1<5,1:textproc/py-sphinx@${PY_FLAVOR} TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}docutils>=0,1:textproc/py-docutils@${PY_FLAVOR} \ diff --git a/textproc/py-sphinx-tabs/distinfo b/textproc/py-sphinx-tabs/distinfo index 547a4f949ce..63fdcf04ac5 100644 --- a/textproc/py-sphinx-tabs/distinfo +++ b/textproc/py-sphinx-tabs/distinfo @@ -1,3 
+1,3 @@ -TIMESTAMP = 1628439066 -SHA256 (sphinx-tabs-3.2.0.tar.gz) = 33137914ed9b276e6a686d7a337310ee77b1dae316fdcbce60476913a152e0a4 -SIZE (sphinx-tabs-3.2.0.tar.gz) = 15162 +TIMESTAMP = 1647264684 +SHA256 (sphinx-tabs-3.3.0.tar.gz) = 3ffab6695b8b7cf252616af1a974e4d3c2d60f633debff373535c008e1e89f7f +SIZE (sphinx-tabs-3.3.0.tar.gz) = 15498 diff --git a/textproc/py-sphinx-tabs/files/patch-setup.py b/textproc/py-sphinx-tabs/files/patch-setup.py deleted file mode 100644 index 9de15e1c573..00000000000 --- a/textproc/py-sphinx-tabs/files/patch-setup.py +++ /dev/null @@ -1,11 +0,0 @@ ---- setup.py.orig 2021-05-11 20:38:43 UTC -+++ setup.py -@@ -25,7 +25,7 @@ setup( - url="https://github.com/executablebooks/sphinx-tabs", - license="MIT", - python_requires="~=3.6", -- install_requires=["sphinx>=2,<5", "pygments", "docutils~=0.16.0"], -+ install_requires=["sphinx>=2,<5", "pygments", "docutils~=0.16"], - extras_require={ - "testing": [ - "coverage", diff --git a/textproc/py-stemming/files/patch-2to3 b/textproc/py-stemming/files/patch-2to3 new file mode 100644 index 00000000000..2bca3a4bd2e --- /dev/null +++ b/textproc/py-stemming/files/patch-2to3 @@ -0,0 +1,11 @@ +--- stemming/porter.py.orig 2010-02-08 23:06:43 UTC ++++ stemming/porter.py +@@ -173,7 +173,7 @@ def stem(w): + return w + + if __name__ == '__main__': +- print stem("fundamentally") ++ print(stem("fundamentally")) + + + diff --git a/textproc/py-tinycss/Makefile b/textproc/py-tinycss/Makefile index 4bac3a30c14..745c63c0970 100644 --- a/textproc/py-tinycss/Makefile +++ b/textproc/py-tinycss/Makefile @@ -12,4 +12,6 @@ COMMENT= Complete yet simple CSS parser for Python USES= python:3.6+ USE_PYTHON= distutils autoplist +NO_ARCH= yes + .include diff --git a/textproc/py-wordcloud/files/patch-wordcloud_query__integral__image.c b/textproc/py-wordcloud/files/patch-wordcloud_query__integral__image.c new file mode 100644 index 00000000000..13eaff49342 --- /dev/null +++ 
b/textproc/py-wordcloud/files/patch-wordcloud_query__integral__image.c @@ -0,0 +1,82 @@ +--- wordcloud/query_integral_image.c.orig 2018-07-26 16:13:02 UTC ++++ wordcloud/query_integral_image.c +@@ -15445,7 +15445,9 @@ static PyTypeObject __pyx_type___pyx_array = { + sizeof(struct __pyx_array_obj), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_array, /*tp_dealloc*/ ++#if PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ ++#endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 +@@ -15553,7 +15555,9 @@ static PyTypeObject __pyx_type___pyx_MemviewEnum = { + sizeof(struct __pyx_MemviewEnum_obj), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_Enum, /*tp_dealloc*/ ++#if PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ ++#endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 +@@ -15803,7 +15807,9 @@ static PyTypeObject __pyx_type___pyx_memoryview = { + sizeof(struct __pyx_memoryview_obj), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_memoryview, /*tp_dealloc*/ ++#if PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ ++#endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 +@@ -15930,7 +15936,9 @@ static PyTypeObject __pyx_type___pyx_memoryviewslice = + sizeof(struct __pyx_memoryviewslice_obj), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc__memoryviewslice, /*tp_dealloc*/ ++#if PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ ++#endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 +@@ -16519,13 +16527,17 @@ static int __Pyx_modinit_type_init_code(void) { + /*--- Type init code ---*/ + __pyx_vtabptr_array = &__pyx_vtable_array; + __pyx_vtable_array.get_memview = (PyObject *(*)(struct __pyx_array_obj *))__pyx_array_get_memview; ++#if PY_VERSION_HEX < 0x03090000 + if (PyType_Ready(&__pyx_type___pyx_array) < 0) __PYX_ERR(1, 104, __pyx_L1_error) + __pyx_type___pyx_array.tp_print = 0; ++#endif + if (__Pyx_SetVtable(__pyx_type___pyx_array.tp_dict, __pyx_vtabptr_array) < 0) 
__PYX_ERR(1, 104, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type___pyx_array) < 0) __PYX_ERR(1, 104, __pyx_L1_error) + __pyx_array_type = &__pyx_type___pyx_array; ++#if PY_VERSION_HEX < 0x03090000 + if (PyType_Ready(&__pyx_type___pyx_MemviewEnum) < 0) __PYX_ERR(1, 278, __pyx_L1_error) + __pyx_type___pyx_MemviewEnum.tp_print = 0; ++#endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type___pyx_MemviewEnum.tp_dictoffset && __pyx_type___pyx_MemviewEnum.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type___pyx_MemviewEnum.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } +@@ -16539,8 +16551,10 @@ static int __Pyx_modinit_type_init_code(void) { + __pyx_vtable_memoryview.setitem_indexed = (PyObject *(*)(struct __pyx_memoryview_obj *, PyObject *, PyObject *))__pyx_memoryview_setitem_indexed; + __pyx_vtable_memoryview.convert_item_to_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *))__pyx_memoryview_convert_item_to_object; + __pyx_vtable_memoryview.assign_item_from_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *, PyObject *))__pyx_memoryview_assign_item_from_object; ++#if PY_VERSION_HEX < 0x03090000 + if (PyType_Ready(&__pyx_type___pyx_memoryview) < 0) __PYX_ERR(1, 329, __pyx_L1_error) + __pyx_type___pyx_memoryview.tp_print = 0; ++#endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type___pyx_memoryview.tp_dictoffset && __pyx_type___pyx_memoryview.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type___pyx_memoryview.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } +@@ -16552,8 +16566,10 @@ static int __Pyx_modinit_type_init_code(void) { + __pyx_vtable__memoryviewslice.__pyx_base.convert_item_to_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *))__pyx_memoryviewslice_convert_item_to_object; + __pyx_vtable__memoryviewslice.__pyx_base.assign_item_from_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *, PyObject 
*))__pyx_memoryviewslice_assign_item_from_object; + __pyx_type___pyx_memoryviewslice.tp_base = __pyx_memoryview_type; ++#if PY_VERSION_HEX < 0x03090000 + if (PyType_Ready(&__pyx_type___pyx_memoryviewslice) < 0) __PYX_ERR(1, 960, __pyx_L1_error) + __pyx_type___pyx_memoryviewslice.tp_print = 0; ++#endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type___pyx_memoryviewslice.tp_dictoffset && __pyx_type___pyx_memoryviewslice.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type___pyx_memoryviewslice.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } diff --git a/textproc/py-wordnet/Makefile b/textproc/py-wordnet/Makefile index daa8fc6b94a..3fa937b7cef 100644 --- a/textproc/py-wordnet/Makefile +++ b/textproc/py-wordnet/Makefile @@ -14,7 +14,7 @@ DISTFILES= pywordnet-${PORTVERSION}.tar.gz:pywn \ MAINTAINER= ports@FreeBSD.org COMMENT= Python Interface to WordNet -USES= python:3.6+ +USES= dos2unix python:3.6+ USE_PYTHON= distutils autoplist WRKSRC= ${WRKDIR}/py${PORTNAME}-${PORTVERSION} @@ -29,6 +29,8 @@ WN_DICTFILES= adj.exc adv.exc cntlist cntlist.rev data.adj data.adv \ verb.exc post-patch: +# concordance.py uses CR line terminators which cannot be fixed by USES=dos2unix + @${CP} ${FILESDIR}/concordance.py ${WRKSRC}/concordance.py @${SED} -e 's|/usr/local/wordnet2.0|${PREFIX}/${WORDNETDATA}|g' \ -e 's|os.path.join(WNHOME.*))|WNHOME)|g' ${WRKSRC}/wordnet.py \ > ${WRKSRC}/wordnet.py.tmp && ${CAT} ${WRKSRC}/wordnet.py.tmp \ diff --git a/textproc/py-wordnet/files/concordance.py b/textproc/py-wordnet/files/concordance.py new file mode 100644 index 00000000000..89caef0036a --- /dev/null +++ b/textproc/py-wordnet/files/concordance.py @@ -0,0 +1,128 @@ +# some accessing of the semantic concordance data for wordnet 1.6 +# by Des Berry, berry@ais.it + +import string, os +from wordnet import binarySearchFile + +# Sample entries in the 'taglist' file +# ordinary%1:18:01:: 1 br-a01:78,1;86,1;88,4 +# ordered%5:00:00:organized:01 2 br-j23:6,14;13,32;66,12 
+# where the general form is: +# lemma%ss_type:lex_filenum:lex_id:head_word:head_id sense_number +[location_list] +# location_list: filename:sent_num,word_num[;sent_num,word_num...] + +ss_type = ("NOUN", "VERB", "ADJECTIVE", "ADVERB", "ADJECTIVE SATELLITE") + +# given a sentence number (and the contents of a semantic concordance file) +# return a string of words as the sentence +def find_sentence(snum, msg): + str = "" % snum + s = string.find(msg, str) + if s < 0: + return "" + s = s + len(str) + sentence = "" + tag = "" + while 1: + if msg[s] == '\n': + s = s + 1 + n = string.find(msg, '<', s) + if n < 0: + break + if n - s != 0: + if tag == "w" and msg[s] != "'" and len(sentence) > 0: # word form + sentence = sentence + " " + sentence = sentence + msg[s:n] + e = string.find(msg, '>', n) + if e < 0: + break + tag = msg[n+1] + if tag == "/": #check for ending sentence + if msg[n+2] == 's': + #end of sentence + break + s = e + 1 + return sentence + +# given a taglist sense (one line of the tagfile) and where to find the tagfile (root) +# return a tuple of +# symset type ('1' .. 
'5') +# sense (numeric character string) +# list of sentences (constructed from the taglist) +def tagsentence(tag, root): + s = string.find(tag, '%') + sentence = [] + type = tag[s+1] + c = s + for i in range(0,4): + c = string.find(tag, ':', c + 1) + c = string.find(tag, ' ', c + 1) + sense = tag[c+1] + c = c + 3 + while 1: + d = string.find(tag, ' ', c) # file separator + if d < 0: + loclist = tag[c:] + else: + loclist = tag[c:d] + c = d + 1 + + e = string.find(loclist, ':') + filename = loclist[:e] + fh = open(root + filename, "rb") + msg = fh.read() + fh.close() + + while 1: + e = e + 1 + f = string.find(loclist, ';', e) + if f < 0: + sent_word = loclist[e:] + else: + sent_word = loclist[e:f] + e = f + + g = string.find(sent_word, ',') + sent = sent_word[:g] + + sentence.append(find_sentence(sent, msg)) + + if f < 0: + break + + if d < 0: + break + return (type, sense, sentence) + +# given a word to search for and where to find the files (root) +# displays the information +# This could be changed to display in different ways! 
+def sentences(word, root): + cache = {} + file = open(root + "taglist", "rb") + key = word + "%" + keylen = len(key) + binarySearchFile(file, key + " ", cache, 10) + print("Word '%s'" % word) + while 1: + line = file.readline() + if line[:keylen] != key: + break + type, sense, sentence = tagsentence(line, root + "tagfiles/") + print(ss_type[string.atoi(type) - 1], sense) + for sent in sentence: + print(sent) + + +def _test(word, corpus, base): + print(corpus) + sentences("ordinary", base + corpus + "/") + +if __name__ == '__main__': + base = "C:/win16/dict/semcor/" + word = "ordinary" + _test(word, "brown1", base) + _test(word, "brown2", base) + _test(word, "brownv", base) + diff --git a/textproc/py-wordnet/files/patch-2to3 b/textproc/py-wordnet/files/patch-2to3 new file mode 100644 index 00000000000..aba2f4cf6d9 --- /dev/null +++ b/textproc/py-wordnet/files/patch-2to3 @@ -0,0 +1,1594 @@ +--- wntools.py.orig 2004-07-19 05:09:43 UTC ++++ wntools.py +@@ -33,6 +33,7 @@ __author__ = "Oliver Steele " + __version__ = "2.0" + + from wordnet import * ++from functools import reduce + + # + # Domain utilities +@@ -41,9 +42,9 @@ from wordnet import * + def _requireSource(entity): + if not hasattr(entity, 'pointers'): + if isinstance(entity, Word): +- raise TypeError, `entity` + " is not a Sense or Synset. Try " + `entity` + "[0] instead." ++ raise TypeError(repr(entity) + " is not a Sense or Synset. 
Try " + repr(entity) + "[0] instead.") + else: +- raise TypeError, `entity` + " is not a Sense or Synset" ++ raise TypeError(repr(entity) + " is not a Sense or Synset") + + def tree(source, pointerType): + """ +@@ -64,9 +65,9 @@ def tree(source, pointerType): + >>> #pprint(tree(dog, HYPONYM)) # too verbose to include here + """ + if isinstance(source, Word): +- return map(lambda s, t=pointerType:tree(s,t), source.getSenses()) ++ return list(map(lambda s, t=pointerType:tree(s,t), source.getSenses())) + _requireSource(source) +- return [source] + map(lambda s, t=pointerType:tree(s,t), source.pointerTargets(pointerType)) ++ return [source] + list(map(lambda s, t=pointerType:tree(s,t), source.pointerTargets(pointerType))) + + def closure(source, pointerType, accumulator=None): + """Return the transitive closure of source under the pointerType +@@ -78,7 +79,7 @@ def closure(source, pointerType, accumulator=None): + ['dog' in {noun: dog, domestic dog, Canis familiaris}, {noun: canine, canid}, {noun: carnivore}, {noun: placental, placental mammal, eutherian, eutherian mammal}, {noun: mammal}, {noun: vertebrate, craniate}, {noun: chordate}, {noun: animal, animate being, beast, brute, creature, fauna}, {noun: organism, being}, {noun: living thing, animate thing}, {noun: object, physical object}, {noun: entity}] + """ + if isinstance(source, Word): +- return reduce(union, map(lambda s, t=pointerType:tree(s,t), source.getSenses())) ++ return reduce(union, list(map(lambda s, t=pointerType:tree(s,t), source.getSenses()))) + _requireSource(source) + if accumulator is None: + accumulator = [] +@@ -193,7 +194,7 @@ def product(u, v): + >>> product("123", "abc") + [('1', 'a'), ('1', 'b'), ('1', 'c'), ('2', 'a'), ('2', 'b'), ('2', 'c'), ('3', 'a'), ('3', 'b'), ('3', 'c')] + """ +- return flatten1(map(lambda a, v=v:map(lambda b, a=a:(a,b), v), u)) ++ return flatten1(list(map(lambda a, v=v:list(map(lambda b, a=a:(a,b), v)), u))) + + def removeDuplicates(sequence): + """Return a copy of 
_sequence_ with equal items removed. +@@ -242,12 +243,12 @@ def getIndex(form, pos='noun'): + transformed string until a match is found or all the different + strings have been tried. It returns a Word or None.""" + def trySubstitutions(trySubstitutions, form, substitutions, lookup=1, dictionary=dictionaryFor(pos)): +- if lookup and dictionary.has_key(form): ++ if lookup and form in dictionary: + return dictionary[form] + elif substitutions: + (old, new) = substitutions[0] + substitute = string.replace(form, old, new) and substitute != form +- if substitute and dictionary.has_key(substitute): ++ if substitute and substitute in dictionary: + return dictionary[substitute] + return trySubstitutions(trySubstitutions, form, substitutions[1:], lookup=0) or \ + (substitute and trySubstitutions(trySubstitutions, substitute, substitutions[1:])) +@@ -313,7 +314,7 @@ def morphy(form, pos='noun', collect=0): + exceptions = binarySearchFile(excfile, form) + if exceptions: + form = exceptions[string.find(exceptions, ' ')+1:-1] +- if lookup and dictionary.has_key(form): ++ if lookup and form in dictionary: + if collect: + collection.append(form) + else: +--- wordnet.py.orig 2004-07-19 06:11:31 UTC ++++ wordnet.py +@@ -53,9 +53,9 @@ WNHOME = environ.get('WNHOME', { + 'mac': ":", + 'dos': "C:\\wn16", + 'nt': "C:\\Program Files\\WordNet\\2.0"} +- .get(os.name, "/usr/local/wordnet2.0")) ++ .get(os.name, "/usr/local/share/py-wordnet")) + +-WNSEARCHDIR = environ.get('WNSEARCHDIR', os.path.join(WNHOME, {'mac': "Database"}.get(os.name, "dict"))) ++WNSEARCHDIR = environ.get('WNSEARCHDIR', WNHOME) + + ReadableRepresentations = 1 + """If true, repr(word), repr(sense), and repr(synset) return +@@ -210,15 +210,15 @@ class Word: + + def __init__(self, line): + """Initialize the word from a line of a WN POS file.""" +- tokens = string.split(line) +- ints = map(int, tokens[int(tokens[3]) + 4:]) +- self.form = string.replace(tokens[0], '_', ' ') ++ tokens = string.split(line) ++ ints = 
list(map(int, tokens[int(tokens[3]) + 4:])) ++ self.form = string.replace(tokens[0], '_', ' ') + "Orthographic representation of the word." +- self.pos = _normalizePOS(tokens[1]) ++ self.pos = _normalizePOS(tokens[1]) + "Part of speech. One of NOUN, VERB, ADJECTIVE, ADVERB." +- self.taggedSenseCount = ints[1] ++ self.taggedSenseCount = ints[1] + "Number of senses that are tagged." +- self._synsetOffsets = ints[2:ints[0]+2] ++ self._synsetOffsets = ints[2:ints[0]+2] + + def getPointers(self, pointerType=None): + """Pointers connect senses and synsets, not words. +@@ -231,18 +231,18 @@ class Word: + raise self.getPointers.__doc__ + + def getSenses(self): +- """Return a sequence of senses. +- +- >>> N['dog'].getSenses() +- ('dog' in {noun: dog, domestic dog, Canis familiaris}, 'dog' in {noun: frump, dog}, 'dog' in {noun: dog}, 'dog' in {noun: cad, bounder, blackguard, dog, hound, heel}, 'dog' in {noun: frank, frankfurter, hotdog, hot dog, dog, wiener, wienerwurst, weenie}, 'dog' in {noun: pawl, detent, click, dog}, 'dog' in {noun: andiron, firedog, dog, dog-iron}) +- """ +- if not hasattr(self, '_senses'): +- def getSense(offset, pos=self.pos, form=self.form): +- return getSynset(pos, offset)[form] +- self._senses = tuple(map(getSense, self._synsetOffsets)) +- del self._synsetOffsets +- return self._senses ++ """Return a sequence of senses. 
+ ++ >>> N['dog'].getSenses() ++ ('dog' in {noun: dog, domestic dog, Canis familiaris}, 'dog' in {noun: frump, dog}, 'dog' in {noun: dog}, 'dog' in {noun: cad, bounder, blackguard, dog, hound, heel}, 'dog' in {noun: frank, frankfurter, hotdog, hot dog, dog, wiener, wienerwurst, weenie}, 'dog' in {noun: pawl, detent, click, dog}, 'dog' in {noun: andiron, firedog, dog, dog-iron}) ++ """ ++ if not hasattr(self, '_senses'): ++ def getSense(offset, pos=self.pos, form=self.form): ++ return getSynset(pos, offset)[form] ++ self._senses = tuple(map(getSense, self._synsetOffsets)) ++ del self._synsetOffsets ++ return self._senses ++ + # Deprecated. Present for backwards compatability. + def senses(self): + import wordnet +@@ -253,70 +253,70 @@ class Word: + return self.getSense() + + def isTagged(self): +- """Return 1 if any sense is tagged. +- +- >>> N['dog'].isTagged() +- 1 +- """ +- return self.taggedSenseCount > 0 ++ """Return 1 if any sense is tagged. ++ ++ >>> N['dog'].isTagged() ++ 1 ++ """ ++ return self.taggedSenseCount > 0 + + def getAdjectivePositions(self): +- """Return a sequence of adjective positions that this word can +- appear in. These are elements of ADJECTIVE_POSITIONS. +- +- >>> ADJ['clear'].getAdjectivePositions() +- [None, 'predicative'] +- """ +- positions = {} +- for sense in self.getSenses(): +- positions[sense.position] = 1 +- return positions.keys() ++ """Return a sequence of adjective positions that this word can ++ appear in. These are elements of ADJECTIVE_POSITIONS. 
++ ++ >>> ADJ['clear'].getAdjectivePositions() ++ [None, 'predicative'] ++ """ ++ positions = {} ++ for sense in self.getSenses(): ++ positions[sense.position] = 1 ++ return list(positions.keys()) + + adjectivePositions = getAdjectivePositions # backwards compatability + + def __cmp__(self, other): +- """ +- >>> N['cat'] < N['dog'] +- 1 +- >>> N['dog'] < V['dog'] +- 1 +- """ +- return _compareInstances(self, other, ('pos', 'form')) ++ """ ++ >>> N['cat'] < N['dog'] ++ 1 ++ >>> N['dog'] < V['dog'] ++ 1 ++ """ ++ return _compareInstances(self, other, ('pos', 'form')) + + def __str__(self): +- """Return a human-readable representation. +- +- >>> str(N['dog']) +- 'dog(n.)' +- """ +- abbrs = {NOUN: 'n.', VERB: 'v.', ADJECTIVE: 'adj.', ADVERB: 'adv.'} +- return self.form + "(" + abbrs[self.pos] + ")" ++ """Return a human-readable representation. ++ ++ >>> str(N['dog']) ++ 'dog(n.)' ++ """ ++ abbrs = {NOUN: 'n.', VERB: 'v.', ADJECTIVE: 'adj.', ADVERB: 'adv.'} ++ return self.form + "(" + abbrs[self.pos] + ")" + + def __repr__(self): +- """If ReadableRepresentations is true, return a human-readable +- representation, e.g. 'dog(n.)'. +- +- If ReadableRepresentations is false, return a machine-readable +- representation, e.g. "getWord('dog', 'noun')". +- """ +- if ReadableRepresentations: +- return str(self) +- return "getWord" + `(self.form, self.pos)` +- ++ """If ReadableRepresentations is true, return a human-readable ++ representation, e.g. 'dog(n.)'. ++ ++ If ReadableRepresentations is false, return a machine-readable ++ representation, e.g. "getWord('dog', 'noun')". 
++ """ ++ if ReadableRepresentations: ++ return str(self) ++ return "getWord" + repr((self.form, self.pos)) ++ + # + # Sequence protocol (a Word's elements are its Senses) + # +- def __nonzero__(self): +- return 1 ++ def __bool__(self): ++ return 1 + + def __len__(self): +- return len(self.getSenses()) ++ return len(self.getSenses()) + + def __getitem__(self, index): +- return self.getSenses()[index] ++ return self.getSenses()[index] + + def __getslice__(self, i, j): +- return self.getSenses()[i:j] ++ return self.getSenses()[i:j] + + + class Synset: +@@ -354,157 +354,157 @@ class Synset: + + def __init__(self, pos, offset, line): + "Initialize the synset from a line off a WN synset file." +- self.pos = pos ++ self.pos = pos + "part of speech -- one of NOUN, VERB, ADJECTIVE, ADVERB." +- self.offset = offset ++ self.offset = offset + """integer offset into the part-of-speech file. Together + with pos, this can be used as a unique id.""" +- tokens = string.split(line[:string.index(line, '|')]) +- self.ssType = tokens[2] +- self.gloss = string.strip(line[string.index(line, '|') + 1:]) ++ tokens = string.split(line[:string.index(line, '|')]) ++ self.ssType = tokens[2] ++ self.gloss = string.strip(line[string.index(line, '|') + 1:]) + self.lexname = Lexname.lexnames[int(tokens[1])] +- (self._senseTuples, remainder) = _partition(tokens[4:], 2, string.atoi(tokens[3], 16)) +- (self._pointerTuples, remainder) = _partition(remainder[1:], 4, int(remainder[0])) +- if pos == VERB: +- (vfTuples, remainder) = _partition(remainder[1:], 3, int(remainder[0])) +- def extractVerbFrames(index, vfTuples): +- return tuple(map(lambda t:string.atoi(t[1]), filter(lambda t,i=index:string.atoi(t[2],16) in (0, i), vfTuples))) +- senseVerbFrames = [] +- for index in range(1, len(self._senseTuples) + 1): +- senseVerbFrames.append(extractVerbFrames(index, vfTuples)) +- self._senseVerbFrames = senseVerbFrames +- self.verbFrames = tuple(extractVerbFrames(None, vfTuples)) ++ (self._senseTuples, 
remainder) = _partition(tokens[4:], 2, string.atoi(tokens[3], 16)) ++ (self._pointerTuples, remainder) = _partition(remainder[1:], 4, int(remainder[0])) ++ if pos == VERB: ++ (vfTuples, remainder) = _partition(remainder[1:], 3, int(remainder[0])) ++ def extractVerbFrames(index, vfTuples): ++ return tuple([string.atoi(t[1]) for t in list(filter(lambda t,i=index:string.atoi(t[2],16) in (0, i), vfTuples))]) ++ senseVerbFrames = [] ++ for index in range(1, len(self._senseTuples) + 1): ++ senseVerbFrames.append(extractVerbFrames(index, vfTuples)) ++ self._senseVerbFrames = senseVerbFrames ++ self.verbFrames = tuple(extractVerbFrames(None, vfTuples)) + """A sequence of integers that index into + VERB_FRAME_STRINGS. These list the verb frames that any + Sense in this synset participates in. (See also + Sense.verbFrames.) Defined only for verbs.""" + + def getSenses(self): +- """Return a sequence of Senses. +- +- >>> N['dog'][0].getSenses() +- ('dog' in {noun: dog, domestic dog, Canis familiaris},) +- """ +- if not hasattr(self, '_senses'): +- def loadSense(senseTuple, verbFrames=None, synset=self): +- return Sense(synset, senseTuple, verbFrames) +- if self.pos == VERB: +- self._senses = tuple(map(loadSense, self._senseTuples, self._senseVerbFrames)) +- del self._senseVerbFrames +- else: +- self._senses = tuple(map(loadSense, self._senseTuples)) +- del self._senseTuples +- return self._senses ++ """Return a sequence of Senses. 
++ ++ >>> N['dog'][0].getSenses() ++ ('dog' in {noun: dog, domestic dog, Canis familiaris},) ++ """ ++ if not hasattr(self, '_senses'): ++ def loadSense(senseTuple, verbFrames=None, synset=self): ++ return Sense(synset, senseTuple, verbFrames) ++ if self.pos == VERB: ++ self._senses = tuple(map(loadSense, self._senseTuples, self._senseVerbFrames)) ++ del self._senseVerbFrames ++ else: ++ self._senses = tuple(map(loadSense, self._senseTuples)) ++ del self._senseTuples ++ return self._senses + + senses = getSenses + + def getPointers(self, pointerType=None): +- """Return a sequence of Pointers. ++ """Return a sequence of Pointers. + + If pointerType is specified, only pointers of that type are + returned. In this case, pointerType should be an element of + POINTER_TYPES. +- +- >>> N['dog'][0].getPointers()[:5] +- (hypernym -> {noun: canine, canid}, member meronym -> {noun: Canis, genus Canis}, member meronym -> {noun: pack}, hyponym -> {noun: pooch, doggie, doggy, barker, bow-wow}, hyponym -> {noun: cur, mongrel, mutt}) +- >>> N['dog'][0].getPointers(HYPERNYM) +- (hypernym -> {noun: canine, canid},) +- """ +- if not hasattr(self, '_pointers'): +- def loadPointer(tuple, synset=self): +- return Pointer(synset.offset, tuple) +- self._pointers = tuple(map(loadPointer, self._pointerTuples)) +- del self._pointerTuples +- if pointerType == None: +- return self._pointers +- else: +- _requirePointerType(pointerType) +- return filter(lambda pointer, type=pointerType: pointer.type == type, self._pointers) ++ ++ >>> N['dog'][0].getPointers()[:5] ++ (hypernym -> {noun: canine, canid}, member meronym -> {noun: Canis, genus Canis}, member meronym -> {noun: pack}, hyponym -> {noun: pooch, doggie, doggy, barker, bow-wow}, hyponym -> {noun: cur, mongrel, mutt}) ++ >>> N['dog'][0].getPointers(HYPERNYM) ++ (hypernym -> {noun: canine, canid},) ++ """ ++ if not hasattr(self, '_pointers'): ++ def loadPointer(tuple, synset=self): ++ return Pointer(synset.offset, tuple) ++ self._pointers = 
tuple(map(loadPointer, self._pointerTuples)) ++ del self._pointerTuples ++ if pointerType == None: ++ return self._pointers ++ else: ++ _requirePointerType(pointerType) ++ return list(filter(lambda pointer, type=pointerType: pointer.type == type, self._pointers)) + + pointers = getPointers # backwards compatability + + def getPointerTargets(self, pointerType=None): +- """Return a sequence of Senses or Synsets. +- ++ """Return a sequence of Senses or Synsets. ++ + If pointerType is specified, only targets of pointers of that + type are returned. In this case, pointerType should be an + element of POINTER_TYPES. +- +- >>> N['dog'][0].getPointerTargets()[:5] +- [{noun: canine, canid}, {noun: Canis, genus Canis}, {noun: pack}, {noun: pooch, doggie, doggy, barker, bow-wow}, {noun: cur, mongrel, mutt}] +- >>> N['dog'][0].getPointerTargets(HYPERNYM) +- [{noun: canine, canid}] +- """ +- return map(Pointer.target, self.getPointers(pointerType)) ++ ++ >>> N['dog'][0].getPointerTargets()[:5] ++ [{noun: canine, canid}, {noun: Canis, genus Canis}, {noun: pack}, {noun: pooch, doggie, doggy, barker, bow-wow}, {noun: cur, mongrel, mutt}] ++ >>> N['dog'][0].getPointerTargets(HYPERNYM) ++ [{noun: canine, canid}] ++ """ ++ return list(map(Pointer.target, self.getPointers(pointerType))) + + pointerTargets = getPointerTargets # backwards compatability + + def isTagged(self): +- """Return 1 if any sense is tagged. +- +- >>> N['dog'][0].isTagged() +- 1 +- >>> N['dog'][1].isTagged() +- 0 +- """ +- return len(filter(Sense.isTagged, self.getSenses())) > 0 ++ """Return 1 if any sense is tagged. ++ ++ >>> N['dog'][0].isTagged() ++ 1 ++ >>> N['dog'][1].isTagged() ++ 0 ++ """ ++ return len(list(filter(Sense.isTagged, self.getSenses()))) > 0 + + def __str__(self): +- """Return a human-readable representation. 
+- +- >>> str(N['dog'][0].synset) +- '{noun: dog, domestic dog, Canis familiaris}' +- """ +- return "{" + self.pos + ": " + string.joinfields(map(lambda sense:sense.form, self.getSenses()), ", ") + "}" ++ """Return a human-readable representation. ++ ++ >>> str(N['dog'][0].synset) ++ '{noun: dog, domestic dog, Canis familiaris}' ++ """ ++ return "{" + self.pos + ": " + string.joinfields([sense.form for sense in self.getSenses()], ", ") + "}" + + def __repr__(self): +- """If ReadableRepresentations is true, return a human-readable +- representation, e.g. 'dog(n.)'. +- +- If ReadableRepresentations is false, return a machine-readable +- representation, e.g. "getSynset(pos, 1234)". +- """ +- if ReadableRepresentations: +- return str(self) +- return "getSynset" + `(self.pos, self.offset)` ++ """If ReadableRepresentations is true, return a human-readable ++ representation, e.g. 'dog(n.)'. ++ ++ If ReadableRepresentations is false, return a machine-readable ++ representation, e.g. "getSynset(pos, 1234)". ++ """ ++ if ReadableRepresentations: ++ return str(self) ++ return "getSynset" + repr((self.pos, self.offset)) + + def __cmp__(self, other): +- return _compareInstances(self, other, ('pos', 'offset')) ++ return _compareInstances(self, other, ('pos', 'offset')) + + # + # Sequence protocol (a Synset's elements are its senses). 
+ # +- def __nonzero__(self): +- return 1 ++ def __bool__(self): ++ return 1 + + def __len__(self): +- """ +- >>> len(N['dog'][0].synset) +- 3 +- """ +- return len(self.getSenses()) ++ """ ++ >>> len(N['dog'][0].synset) ++ 3 ++ """ ++ return len(self.getSenses()) + + def __getitem__(self, idx): +- """ +- >>> N['dog'][0].synset[0] == N['dog'][0] +- 1 +- >>> N['dog'][0].synset['dog'] == N['dog'][0] +- 1 +- >>> N['dog'][0].synset[N['dog']] == N['dog'][0] +- 1 +- >>> N['cat'][6] +- 'cat' in {noun: big cat, cat} +- """ +- senses = self.getSenses() +- if isinstance(idx, Word): +- idx = idx.form +- if isinstance(idx, StringType): +- idx = _index(idx, map(lambda sense:sense.form, senses)) or \ +- _index(idx, map(lambda sense:sense.form, senses), _equalsIgnoreCase) +- return senses[idx] ++ """ ++ >>> N['dog'][0].synset[0] == N['dog'][0] ++ 1 ++ >>> N['dog'][0].synset['dog'] == N['dog'][0] ++ 1 ++ >>> N['dog'][0].synset[N['dog']] == N['dog'][0] ++ 1 ++ >>> N['cat'][6] ++ 'cat' in {noun: big cat, cat} ++ """ ++ senses = self.getSenses() ++ if isinstance(idx, Word): ++ idx = idx.form ++ if isinstance(idx, StringType): ++ idx = _index(idx, [sense.form for sense in senses]) or \ ++ _index(idx, [sense.form for sense in senses], _equalsIgnoreCase) ++ return senses[idx] + + def __getslice__(self, i, j): +- return self.getSenses()[i:j] ++ return self.getSenses()[i:j] + + + class Sense: +@@ -525,7 +525,7 @@ class Sense: + VERB_FRAME_STRINGS. These list the verb frames that this + Sense partipates in. Defined only for verbs. + +- >>> decide = V['decide'][0].synset # first synset for 'decide' ++ >>> decide = V['decide'][0].synset # first synset for 'decide' + >>> decide[0].verbFrames + (8, 2, 26, 29) + >>> decide[1].verbFrames +@@ -536,124 +536,124 @@ class Sense: + + def __init__(sense, synset, senseTuple, verbFrames=None): + "Initialize a sense from a synset's senseTuple." 
+- # synset is stored by key (pos, synset) rather than object +- # reference, to avoid creating a circular reference between +- # Senses and Synsets that will prevent the vm from +- # garbage-collecting them. +- sense.pos = synset.pos ++ # synset is stored by key (pos, synset) rather than object ++ # reference, to avoid creating a circular reference between ++ # Senses and Synsets that will prevent the vm from ++ # garbage-collecting them. ++ sense.pos = synset.pos + "part of speech -- one of NOUN, VERB, ADJECTIVE, ADVERB" +- sense.synsetOffset = synset.offset ++ sense.synsetOffset = synset.offset + "synset key. This is used to retrieve the sense." +- sense.verbFrames = verbFrames ++ sense.verbFrames = verbFrames + """A sequence of integers that index into + VERB_FRAME_STRINGS. These list the verb frames that this + Sense partipates in. Defined only for verbs.""" +- (form, idString) = senseTuple +- sense.position = None +- if '(' in form: +- index = string.index(form, '(') +- key = form[index + 1:-1] +- form = form[:index] +- if key == 'a': +- sense.position = ATTRIBUTIVE +- elif key == 'p': +- sense.position = PREDICATIVE +- elif key == 'ip': +- sense.position = IMMEDIATE_POSTNOMINAL +- else: +- raise "unknown attribute " + key +- sense.form = string.replace(form, '_', ' ') ++ (form, idString) = senseTuple ++ sense.position = None ++ if '(' in form: ++ index = string.index(form, '(') ++ key = form[index + 1:-1] ++ form = form[:index] ++ if key == 'a': ++ sense.position = ATTRIBUTIVE ++ elif key == 'p': ++ sense.position = PREDICATIVE ++ elif key == 'ip': ++ sense.position = IMMEDIATE_POSTNOMINAL ++ else: ++ raise "unknown attribute " + key ++ sense.form = string.replace(form, '_', ' ') + "orthographic representation of the Word this is a Sense of." 
+ + def __getattr__(self, name): +- # see the note at __init__ about why 'synset' is provided as a +- # 'virtual' slot +- if name == 'synset': +- return getSynset(self.pos, self.synsetOffset) ++ # see the note at __init__ about why 'synset' is provided as a ++ # 'virtual' slot ++ if name == 'synset': ++ return getSynset(self.pos, self.synsetOffset) + elif name == 'lexname': + return self.synset.lexname +- else: +- raise AttributeError, name ++ else: ++ raise AttributeError(name) + + def __str__(self): +- """Return a human-readable representation. +- +- >>> str(N['dog']) +- 'dog(n.)' +- """ +- return `self.form` + " in " + str(self.synset) ++ """Return a human-readable representation. ++ ++ >>> str(N['dog']) ++ 'dog(n.)' ++ """ ++ return repr(self.form) + " in " + str(self.synset) + + def __repr__(self): +- """If ReadableRepresentations is true, return a human-readable +- representation, e.g. 'dog(n.)'. +- +- If ReadableRepresentations is false, return a machine-readable +- representation, e.g. "getWord('dog', 'noun')". +- """ +- if ReadableRepresentations: +- return str(self) +- return "%s[%s]" % (`self.synset`, `self.form`) ++ """If ReadableRepresentations is true, return a human-readable ++ representation, e.g. 'dog(n.)'. ++ ++ If ReadableRepresentations is false, return a machine-readable ++ representation, e.g. "getWord('dog', 'noun')". ++ """ ++ if ReadableRepresentations: ++ return str(self) ++ return "%s[%s]" % (repr(self.synset), repr(self.form)) + + def getPointers(self, pointerType=None): +- """Return a sequence of Pointers. +- ++ """Return a sequence of Pointers. ++ + If pointerType is specified, only pointers of that type are + returned. In this case, pointerType should be an element of + POINTER_TYPES. 
+- +- >>> N['dog'][0].getPointers()[:5] +- (hypernym -> {noun: canine, canid}, member meronym -> {noun: Canis, genus Canis}, member meronym -> {noun: pack}, hyponym -> {noun: pooch, doggie, doggy, barker, bow-wow}, hyponym -> {noun: cur, mongrel, mutt}) +- >>> N['dog'][0].getPointers(HYPERNYM) +- (hypernym -> {noun: canine, canid},) +- """ +- senseIndex = _index(self, self.synset.getSenses()) +- def pointsFromThisSense(pointer, selfIndex=senseIndex): +- return pointer.sourceIndex == 0 or pointer.sourceIndex - 1 == selfIndex +- return filter(pointsFromThisSense, self.synset.getPointers(pointerType)) ++ ++ >>> N['dog'][0].getPointers()[:5] ++ (hypernym -> {noun: canine, canid}, member meronym -> {noun: Canis, genus Canis}, member meronym -> {noun: pack}, hyponym -> {noun: pooch, doggie, doggy, barker, bow-wow}, hyponym -> {noun: cur, mongrel, mutt}) ++ >>> N['dog'][0].getPointers(HYPERNYM) ++ (hypernym -> {noun: canine, canid},) ++ """ ++ senseIndex = _index(self, self.synset.getSenses()) ++ def pointsFromThisSense(pointer, selfIndex=senseIndex): ++ return pointer.sourceIndex == 0 or pointer.sourceIndex - 1 == selfIndex ++ return list(filter(pointsFromThisSense, self.synset.getPointers(pointerType))) + + pointers = getPointers # backwards compatability + + def getPointerTargets(self, pointerType=None): +- """Return a sequence of Senses or Synsets. +- ++ """Return a sequence of Senses or Synsets. ++ + If pointerType is specified, only targets of pointers of that + type are returned. In this case, pointerType should be an + element of POINTER_TYPES. 
+- +- >>> N['dog'][0].getPointerTargets()[:5] +- [{noun: canine, canid}, {noun: Canis, genus Canis}, {noun: pack}, {noun: pooch, doggie, doggy, barker, bow-wow}, {noun: cur, mongrel, mutt}] +- >>> N['dog'][0].getPointerTargets(HYPERNYM) +- [{noun: canine, canid}] +- """ +- return map(Pointer.target, self.getPointers(pointerType)) ++ ++ >>> N['dog'][0].getPointerTargets()[:5] ++ [{noun: canine, canid}, {noun: Canis, genus Canis}, {noun: pack}, {noun: pooch, doggie, doggy, barker, bow-wow}, {noun: cur, mongrel, mutt}] ++ >>> N['dog'][0].getPointerTargets(HYPERNYM) ++ [{noun: canine, canid}] ++ """ ++ return list(map(Pointer.target, self.getPointers(pointerType))) + + pointerTargets = getPointerTargets # backwards compatability + + def getSenses(self): +- return self, ++ return self, + + senses = getSenses # backwards compatability + + def isTagged(self): +- """Return 1 if any sense is tagged. +- +- >>> N['dog'][0].isTagged() +- 1 +- >>> N['dog'][1].isTagged() +- 0 +- """ +- word = self.word() +- return _index(self, word.getSenses()) < word.taggedSenseCount ++ """Return 1 if any sense is tagged. 
++ ++ >>> N['dog'][0].isTagged() ++ 1 ++ >>> N['dog'][1].isTagged() ++ 0 ++ """ ++ word = self.word() ++ return _index(self, word.getSenses()) < word.taggedSenseCount + + def getWord(self): +- return getWord(self.form, self.pos) ++ return getWord(self.form, self.pos) + + word = getWord # backwards compatability + + def __cmp__(self, other): +- def senseIndex(sense, synset=self.synset): +- return _index(sense, synset.getSenses(), testfn=lambda a,b: a.form == b.form) +- return _compareInstances(self, other, ('synset',)) or cmp(senseIndex(self), senseIndex(other)) ++ def senseIndex(sense, synset=self.synset): ++ return _index(sense, synset.getSenses(), testfn=lambda a,b: a.form == b.form) ++ return _compareInstances(self, other, ('synset',)) or cmp(senseIndex(self), senseIndex(other)) + + + class Pointer: +@@ -668,21 +668,21 @@ class Pointer: + """ + + _POINTER_TYPE_TABLE = { +- '!': ANTONYM, ++ '!': ANTONYM, + '@': HYPERNYM, + '~': HYPONYM, +- '=': ATTRIBUTE, ++ '=': ATTRIBUTE, + '^': ALSO_SEE, + '*': ENTAILMENT, + '>': CAUSE, +- '$': VERB_GROUP, +- '#m': MEMBER_MERONYM, ++ '$': VERB_GROUP, ++ '#m': MEMBER_MERONYM, + '#s': SUBSTANCE_MERONYM, + '#p': PART_MERONYM, +- '%m': MEMBER_HOLONYM, ++ '%m': MEMBER_HOLONYM, + '%s': SUBSTANCE_HOLONYM, + '%p': PART_HOLONYM, +- '&': SIMILAR, ++ '&': SIMILAR, + '<': PARTICIPLE_OF, + '\\': PERTAINYM, + # New in wn 2.0: +@@ -696,51 +696,51 @@ class Pointer: + } + + def __init__(self, sourceOffset, pointerTuple): +- (type, offset, pos, indices) = pointerTuple +- self.type = Pointer._POINTER_TYPE_TABLE[type] ++ (type, offset, pos, indices) = pointerTuple ++ self.type = Pointer._POINTER_TYPE_TABLE[type] + """One of POINTER_TYPES.""" +- self.sourceOffset = sourceOffset +- self.targetOffset = int(offset) +- self.pos = _normalizePOS(pos) ++ self.sourceOffset = sourceOffset ++ self.targetOffset = int(offset) ++ self.pos = _normalizePOS(pos) + """part of speech -- one of NOUN, VERB, ADJECTIVE, ADVERB""" +- indices = string.atoi(indices, 16) 
+- self.sourceIndex = indices >> 8 +- self.targetIndex = indices & 255 ++ indices = string.atoi(indices, 16) ++ self.sourceIndex = indices >> 8 ++ self.targetIndex = indices & 255 + + def getSource(self): +- synset = getSynset(self.pos, self.sourceOffset) +- if self.sourceIndex: +- return synset[self.sourceIndex - 1] +- else: +- return synset ++ synset = getSynset(self.pos, self.sourceOffset) ++ if self.sourceIndex: ++ return synset[self.sourceIndex - 1] ++ else: ++ return synset + + source = getSource # backwards compatability + + def getTarget(self): +- synset = getSynset(self.pos, self.targetOffset) +- if self.targetIndex: +- return synset[self.targetIndex - 1] +- else: +- return synset ++ synset = getSynset(self.pos, self.targetOffset) ++ if self.targetIndex: ++ return synset[self.targetIndex - 1] ++ else: ++ return synset + + target = getTarget # backwards compatability + + def __str__(self): +- return self.type + " -> " + str(self.target()) ++ return self.type + " -> " + str(self.target()) + + def __repr__(self): +- if ReadableRepresentations: +- return str(self) +- return "<" + str(self) + ">" ++ if ReadableRepresentations: ++ return str(self) ++ return "<" + str(self) + ">" + + def __cmp__(self, other): +- diff = _compareInstances(self, other, ('pos', 'sourceOffset')) +- if diff: +- return diff +- synset = self.source() +- def pointerIndex(sense, synset=synset): +- return _index(sense, synset.getPointers(), testfn=lambda a,b: not _compareInstances(a, b, ('type', 'sourceIndex', 'targetIndex'))) +- return cmp(pointerIndex(self), pointerIndex(other)) ++ diff = _compareInstances(self, other, ('pos', 'sourceOffset')) ++ if diff: ++ return diff ++ synset = self.source() ++ def pointerIndex(sense, synset=synset): ++ return _index(sense, synset.getPointers(), testfn=lambda a,b: not _compareInstances(a, b, ('type', 'sourceIndex', 'targetIndex'))) ++ return cmp(pointerIndex(self), pointerIndex(other)) + + + # Loading the lexnames +@@ -794,59 +794,59 @@ class 
Dictionary: + """ + + def __init__(self, pos, filenameroot): +- self.pos = pos ++ self.pos = pos + """part of speech -- one of NOUN, VERB, ADJECTIVE, ADVERB""" +- self.indexFile = _IndexFile(pos, filenameroot) +- self.dataFile = open(_dataFilePathname(filenameroot), _FILE_OPEN_MODE) ++ self.indexFile = _IndexFile(pos, filenameroot) ++ self.dataFile = open(_dataFilePathname(filenameroot), _FILE_OPEN_MODE) + + def __repr__(self): +- dictionaryVariables = {N: 'N', V: 'V', ADJ: 'ADJ', ADV: 'ADV'} +- if dictionaryVariables.get(self): +- return self.__module__ + "." + dictionaryVariables[self] +- return "<%s.%s instance for %s>" % (self.__module__, "Dictionary", self.pos) ++ dictionaryVariables = {N: 'N', V: 'V', ADJ: 'ADJ', ADV: 'ADV'} ++ if dictionaryVariables.get(self): ++ return self.__module__ + "." + dictionaryVariables[self] ++ return "<%s.%s instance for %s>" % (self.__module__, "Dictionary", self.pos) + + def getWord(self, form, line=None): +- key = string.replace(string.lower(form), ' ', '_') +- pos = self.pos +- def loader(key=key, line=line, indexFile=self.indexFile): +- line = line or indexFile.get(key) +- return line and Word(line) +- word = _entityCache.get((pos, key), loader) +- if word: +- return word +- else: +- raise KeyError, "%s is not in the %s database" % (`form`, `pos`) ++ key = string.replace(string.lower(form), ' ', '_') ++ pos = self.pos ++ def loader(key=key, line=line, indexFile=self.indexFile): ++ line = line or indexFile.get(key) ++ return line and Word(line) ++ word = _entityCache.get((pos, key), loader) ++ if word: ++ return word ++ else: ++ raise KeyError("%s is not in the %s database" % (repr(form), repr(pos))) + + def getSynset(self, offset): +- pos = self.pos +- def loader(pos=pos, offset=offset, dataFile=self.dataFile): +- return Synset(pos, offset, _lineAt(dataFile, offset)) +- return _entityCache.get((pos, offset), loader) ++ pos = self.pos ++ def loader(pos=pos, offset=offset, dataFile=self.dataFile): ++ return Synset(pos, offset, 
_lineAt(dataFile, offset)) ++ return _entityCache.get((pos, offset), loader) + + def _buildIndexCacheFile(self): +- self.indexFile._buildIndexCacheFile() ++ self.indexFile._buildIndexCacheFile() + + # + # Sequence protocol (a Dictionary's items are its Words) + # +- def __nonzero__(self): +- """Return false. (This is to avoid scanning the whole index file +- to compute len when a Dictionary is used in test position.) +- +- >>> N and 'true' +- 'true' +- """ +- return 1 ++ def __bool__(self): ++ """Return false. (This is to avoid scanning the whole index file ++ to compute len when a Dictionary is used in test position.) ++ ++ >>> N and 'true' ++ 'true' ++ """ ++ return 1 + + def __len__(self): +- """Return the number of index entries. +- +- >>> len(ADJ) +- 21435 +- """ +- if not hasattr(self, 'length'): +- self.length = len(self.indexFile) +- return self.length ++ """Return the number of index entries. ++ ++ >>> len(ADJ) ++ 21435 ++ """ ++ if not hasattr(self, 'length'): ++ self.length = len(self.indexFile) ++ return self.length + + def __getslice__(self, a, b): + results = [] +@@ -860,22 +860,22 @@ class Dictionary: + return results + + def __getitem__(self, index): +- """If index is a String, return the Word whose form is +- index. If index is an integer n, return the Word +- indexed by the n'th Word in the Index file. +- +- >>> N['dog'] +- dog(n.) +- >>> N[0] +- 'hood(n.) +- """ +- if isinstance(index, StringType): +- return self.getWord(index) +- elif isinstance(index, IntType): +- line = self.indexFile[index] +- return self.getWord(string.replace(line[:string.find(line, ' ')], '_', ' '), line) +- else: +- raise TypeError, "%s is not a String or Int" % `index` ++ """If index is a String, return the Word whose form is ++ index. If index is an integer n, return the Word ++ indexed by the n'th Word in the Index file. ++ ++ >>> N['dog'] ++ dog(n.) ++ >>> N[0] ++ 'hood(n.) 
++ """ ++ if isinstance(index, StringType): ++ return self.getWord(index) ++ elif isinstance(index, IntType): ++ line = self.indexFile[index] ++ return self.getWord(string.replace(line[:string.find(line, ' ')], '_', ' '), line) ++ else: ++ raise TypeError("%s is not a String or Int" % repr(index)) + + # + # Dictionary protocol +@@ -884,54 +884,54 @@ class Dictionary: + # + + def get(self, key, default=None): +- """Return the Word whose form is _key_, or _default_. +- +- >>> N.get('dog') +- dog(n.) +- >>> N.get('inu') +- """ +- try: +- return self[key] +- except LookupError: +- return default ++ """Return the Word whose form is _key_, or _default_. ++ ++ >>> N.get('dog') ++ dog(n.) ++ >>> N.get('inu') ++ """ ++ try: ++ return self[key] ++ except LookupError: ++ return default + + def keys(self): +- """Return a sorted list of strings that index words in this +- dictionary.""" +- return self.indexFile.keys() ++ """Return a sorted list of strings that index words in this ++ dictionary.""" ++ return list(self.indexFile.keys()) + + def has_key(self, form): +- """Return true iff the argument indexes a word in this dictionary. +- +- >>> N.has_key('dog') +- 1 +- >>> N.has_key('inu') +- 0 +- """ +- return self.indexFile.has_key(form) ++ """Return true iff the argument indexes a word in this dictionary. ++ ++ >>> N.has_key('dog') ++ 1 ++ >>> N.has_key('inu') ++ 0 ++ """ ++ return form in self.indexFile + + # + # Testing + # + + def _testKeys(self): +- """Verify that index lookup can find each word in the index file.""" +- print "Testing: ", self +- file = open(self.indexFile.file.name, _FILE_OPEN_MODE) +- counter = 0 +- while 1: +- line = file.readline() +- if line == '': break +- if line[0] != ' ': +- key = string.replace(line[:string.find(line, ' ')], '_', ' ') +- if (counter % 1000) == 0: +- print "%s..." % (key,), +- import sys +- sys.stdout.flush() +- counter = counter + 1 +- self[key] +- file.close() +- print "done." 
++ """Verify that index lookup can find each word in the index file.""" ++ print("Testing: ", self) ++ file = open(self.indexFile.file.name, _FILE_OPEN_MODE) ++ counter = 0 ++ while 1: ++ line = file.readline() ++ if line == '': break ++ if line[0] != ' ': ++ key = string.replace(line[:string.find(line, ' ')], '_', ' ') ++ if (counter % 1000) == 0: ++ print("%s..." % (key,), end=' ') ++ import sys ++ sys.stdout.flush() ++ counter = counter + 1 ++ self[key] ++ file.close() ++ print("done.") + + + class _IndexFile: +@@ -939,69 +939,69 @@ class _IndexFile: + Sequence and Dictionary interface to a sorted index file.""" + + def __init__(self, pos, filenameroot): +- self.pos = pos +- self.file = open(_indexFilePathname(filenameroot), _FILE_OPEN_MODE) +- self.offsetLineCache = {} # Table of (pathname, offset) -> (line, nextOffset) +- self.rewind() +- self.shelfname = os.path.join(WNSEARCHDIR, pos + ".pyidx") +- try: +- import shelve +- self.indexCache = shelve.open(self.shelfname, 'r') +- except: +- pass ++ self.pos = pos ++ self.file = open(_indexFilePathname(filenameroot), _FILE_OPEN_MODE) ++ self.offsetLineCache = {} # Table of (pathname, offset) -> (line, nextOffset) ++ self.rewind() ++ self.shelfname = os.path.join(WNSEARCHDIR, pos + ".pyidx") ++ try: ++ import shelve ++ self.indexCache = shelve.open(self.shelfname, 'r') ++ except: ++ pass + + def rewind(self): +- self.file.seek(0) +- while 1: +- offset = self.file.tell() +- line = self.file.readline() +- if (line[0] != ' '): +- break +- self.nextIndex = 0 +- self.nextOffset = offset ++ self.file.seek(0) ++ while 1: ++ offset = self.file.tell() ++ line = self.file.readline() ++ if (line[0] != ' '): ++ break ++ self.nextIndex = 0 ++ self.nextOffset = offset + + # + # Sequence protocol (an _IndexFile's items are its lines) + # +- def __nonzero__(self): +- return 1 ++ def __bool__(self): ++ return 1 + + def __len__(self): +- if hasattr(self, 'indexCache'): +- return len(self.indexCache) +- self.rewind() +- lines = 0 +- 
while 1: +- line = self.file.readline() +- if line == "": +- break +- lines = lines + 1 +- return lines ++ if hasattr(self, 'indexCache'): ++ return len(self.indexCache) ++ self.rewind() ++ lines = 0 ++ while 1: ++ line = self.file.readline() ++ if line == "": ++ break ++ lines = lines + 1 ++ return lines + +- def __nonzero__(self): +- return 1 ++ def __bool__(self): ++ return 1 + + def __getitem__(self, index): +- if isinstance(index, StringType): +- if hasattr(self, 'indexCache'): +- return self.indexCache[index] +- return binarySearchFile(self.file, index, self.offsetLineCache, 8) +- elif isinstance(index, IntType): +- if hasattr(self, 'indexCache'): +- return self.get(self.keys[index]) +- if index < self.nextIndex: +- self.rewind() +- while self.nextIndex <= index: +- self.file.seek(self.nextOffset) +- line = self.file.readline() +- if line == "": +- raise IndexError, "index out of range" +- self.nextIndex = self.nextIndex + 1 +- self.nextOffset = self.file.tell() +- return line +- else: +- raise TypeError, "%s is not a String or Int" % `index` +- ++ if isinstance(index, StringType): ++ if hasattr(self, 'indexCache'): ++ return self.indexCache[index] ++ return binarySearchFile(self.file, index, self.offsetLineCache, 8) ++ elif isinstance(index, IntType): ++ if hasattr(self, 'indexCache'): ++ return self.get(self.keys[index]) ++ if index < self.nextIndex: ++ self.rewind() ++ while self.nextIndex <= index: ++ self.file.seek(self.nextOffset) ++ line = self.file.readline() ++ if line == "": ++ raise IndexError("index out of range") ++ self.nextIndex = self.nextIndex + 1 ++ self.nextOffset = self.file.tell() ++ return line ++ else: ++ raise TypeError("%s is not a String or Int" % repr(index)) ++ + # + # Dictionary protocol + # +@@ -1009,62 +1009,62 @@ class _IndexFile: + # + + def get(self, key, default=None): +- try: +- return self[key] +- except LookupError: +- return default ++ try: ++ return self[key] ++ except LookupError: ++ return default + + def keys(self): 
+- if hasattr(self, 'indexCache'): +- keys = self.indexCache.keys() +- keys.sort() +- return keys +- else: +- keys = [] +- self.rewind() +- while 1: +- line = self.file.readline() +- if not line: break ++ if hasattr(self, 'indexCache'): ++ keys = list(self.indexCache.keys()) ++ keys.sort() ++ return keys ++ else: ++ keys = [] ++ self.rewind() ++ while 1: ++ line = self.file.readline() ++ if not line: break + key = line.split(' ', 1)[0] +- keys.append(key.replace('_', ' ')) +- return keys ++ keys.append(key.replace('_', ' ')) ++ return keys + + def has_key(self, key): +- key = key.replace(' ', '_') # test case: V['haze over'] +- if hasattr(self, 'indexCache'): +- return self.indexCache.has_key(key) +- return self.get(key) != None ++ key = key.replace(' ', '_') # test case: V['haze over'] ++ if hasattr(self, 'indexCache'): ++ return key in self.indexCache ++ return self.get(key) != None + + # + # Index file + # + + def _buildIndexCacheFile(self): +- import shelve +- import os +- print "Building %s:" % (self.shelfname,), +- tempname = self.shelfname + ".temp" +- try: +- indexCache = shelve.open(tempname) +- self.rewind() +- count = 0 +- while 1: +- offset, line = self.file.tell(), self.file.readline() +- if not line: break +- key = line[:string.find(line, ' ')] +- if (count % 1000) == 0: +- print "%s..." % (key,), +- import sys +- sys.stdout.flush() +- indexCache[key] = line +- count = count + 1 +- indexCache.close() +- os.rename(tempname, self.shelfname) +- finally: +- try: os.remove(tempname) +- except: pass +- print "done." +- self.indexCache = shelve.open(self.shelfname, 'r') ++ import shelve ++ import os ++ print("Building %s:" % (self.shelfname,), end=' ') ++ tempname = self.shelfname + ".temp" ++ try: ++ indexCache = shelve.open(tempname) ++ self.rewind() ++ count = 0 ++ while 1: ++ offset, line = self.file.tell(), self.file.readline() ++ if not line: break ++ key = line[:string.find(line, ' ')] ++ if (count % 1000) == 0: ++ print("%s..." 
% (key,), end=' ') ++ import sys ++ sys.stdout.flush() ++ indexCache[key] = line ++ count = count + 1 ++ indexCache.close() ++ os.rename(tempname, self.shelfname) ++ finally: ++ try: os.remove(tempname) ++ except: pass ++ print("done.") ++ self.indexCache = shelve.open(self.shelfname, 'r') + + + # +@@ -1091,20 +1091,20 @@ getword, getsense, getsynset = getWord, getSense, getS + + def _requirePointerType(pointerType): + if pointerType not in POINTER_TYPES: +- raise TypeError, `pointerType` + " is not a pointer type" ++ raise TypeError(repr(pointerType) + " is not a pointer type") + return pointerType + + def _compareInstances(a, b, fields): + """"Return -1, 0, or 1 according to a comparison first by type, + then by class, and finally by each of fields.""" # " <- for emacs + if not hasattr(b, '__class__'): +- return cmp(type(a), type(b)) ++ return cmp(type(a), type(b)) + elif a.__class__ != b.__class__: +- return cmp(a.__class__, b.__class__) ++ return cmp(a.__class__, b.__class__) + for field in fields: +- diff = cmp(getattr(a, field), getattr(b, field)) +- if diff: +- return diff ++ diff = cmp(getattr(a, field), getattr(b, field)) ++ if diff: ++ return diff + return 0 + + def _equalsIgnoreCase(a, b): +@@ -1122,14 +1122,14 @@ def _equalsIgnoreCase(a, b): + # + def _dataFilePathname(filenameroot): + if os.name in ('dos', 'nt'): +- path = os.path.join(WNSEARCHDIR, filenameroot + ".dat") ++ path = os.path.join(WNSEARCHDIR, filenameroot + ".dat") + if os.path.exists(path): + return path + return os.path.join(WNSEARCHDIR, "data." + filenameroot) + + def _indexFilePathname(filenameroot): + if os.name in ('dos', 'nt'): +- path = os.path.join(WNSEARCHDIR, filenameroot + ".idx") ++ path = os.path.join(WNSEARCHDIR, filenameroot + ".idx") + if os.path.exists(path): + return path + return os.path.join(WNSEARCHDIR, "index." 
+ filenameroot) +@@ -1146,30 +1146,30 @@ def binarySearchFile(file, key, cache={}, cacheDepth=- + #if count > 20: + # raise "infinite loop" + lastState = start, end +- middle = (start + end) / 2 +- if cache.get(middle): +- offset, line = cache[middle] +- else: +- file.seek(max(0, middle - 1)) +- if middle > 0: +- file.readline() +- offset, line = file.tell(), file.readline() +- if currentDepth < cacheDepth: +- cache[middle] = (offset, line) ++ middle = (start + end) / 2 ++ if cache.get(middle): ++ offset, line = cache[middle] ++ else: ++ file.seek(max(0, middle - 1)) ++ if middle > 0: ++ file.readline() ++ offset, line = file.tell(), file.readline() ++ if currentDepth < cacheDepth: ++ cache[middle] = (offset, line) + #print start, middle, end, offset, line, +- if offset > end: +- assert end != middle - 1, "infinite loop" +- end = middle - 1 +- elif line[:keylen] == key:# and line[keylen + 1] == ' ': +- return line ++ if offset > end: ++ assert end != middle - 1, "infinite loop" ++ end = middle - 1 ++ elif line[:keylen] == key:# and line[keylen + 1] == ' ': ++ return line + #elif offset == end: + # return None +- elif line > key: +- assert end != middle - 1, "infinite loop" +- end = middle - 1 +- elif line < key: +- start = offset + len(line) - 1 +- currentDepth = currentDepth + 1 ++ elif line > key: ++ assert end != middle - 1, "infinite loop" ++ end = middle - 1 ++ elif line < key: ++ start = offset + len(line) - 1 ++ currentDepth = currentDepth + 1 + thisState = start, end + if lastState == thisState: + # detects the condition where we're searching past the end +@@ -1198,12 +1198,12 @@ def _index(key, sequence, testfn=None, keyfn=None): + """ + index = 0 + for element in sequence: +- value = element +- if keyfn: +- value = keyfn(value) +- if (not testfn and value == key) or (testfn and testfn(value, key)): +- return index +- index = index + 1 ++ value = element ++ if keyfn: ++ value = keyfn(value) ++ if (not testfn and value == key) or (testfn and testfn(value, 
key)): ++ return index ++ index = index + 1 + return None + + def _partition(sequence, size, count): +@@ -1216,7 +1216,7 @@ def _partition(sequence, size, count): + + partitions = [] + for index in range(0, size * count, size): +- partitions.append(sequence[index:index + size]) ++ partitions.append(sequence[index:index + size]) + return (partitions, sequence[size * count:]) + + +@@ -1261,49 +1261,49 @@ class _LRUCache: + but the two implementations aren't directly comparable.""" + + def __init__(this, capacity): +- this.capacity = capacity +- this.clear() ++ this.capacity = capacity ++ this.clear() + + def clear(this): +- this.values = {} +- this.history = {} +- this.oldestTimestamp = 0 +- this.nextTimestamp = 1 ++ this.values = {} ++ this.history = {} ++ this.oldestTimestamp = 0 ++ this.nextTimestamp = 1 + + def removeOldestEntry(this): +- while this.oldestTimestamp < this.nextTimestamp: +- if this.history.get(this.oldestTimestamp): +- key = this.history[this.oldestTimestamp] +- del this.history[this.oldestTimestamp] +- del this.values[key] +- return +- this.oldestTimestamp = this.oldestTimestamp + 1 ++ while this.oldestTimestamp < this.nextTimestamp: ++ if this.history.get(this.oldestTimestamp): ++ key = this.history[this.oldestTimestamp] ++ del this.history[this.oldestTimestamp] ++ del this.values[key] ++ return ++ this.oldestTimestamp = this.oldestTimestamp + 1 + + def setCapacity(this, capacity): +- if capacity == 0: +- this.clear() +- else: +- this.capacity = capacity +- while len(this.values) > this.capacity: +- this.removeOldestEntry() ++ if capacity == 0: ++ this.clear() ++ else: ++ this.capacity = capacity ++ while len(this.values) > this.capacity: ++ this.removeOldestEntry() + + def get(this, key, loadfn=None): +- value = None +- if this.values: +- pair = this.values.get(key) +- if pair: +- (value, timestamp) = pair +- del this.history[timestamp] +- if value == None: +- value = loadfn and loadfn() +- if this.values != None: +- timestamp = 
this.nextTimestamp +- this.nextTimestamp = this.nextTimestamp + 1 +- this.values[key] = (value, timestamp) +- this.history[timestamp] = key +- if len(this.values) > this.capacity: +- this.removeOldestEntry() +- return value ++ value = None ++ if this.values: ++ pair = this.values.get(key) ++ if pair: ++ (value, timestamp) = pair ++ del this.history[timestamp] ++ if value == None: ++ value = loadfn and loadfn() ++ if this.values != None: ++ timestamp = this.nextTimestamp ++ this.nextTimestamp = this.nextTimestamp + 1 ++ this.values[key] = (value, timestamp) ++ this.history[timestamp] = key ++ if len(this.values) > this.capacity: ++ this.removeOldestEntry() ++ return value + + + class _NullCache: +@@ -1311,10 +1311,10 @@ class _NullCache: + LRUCache implements), but doesn't store any values.""" + + def clear(): +- pass ++ pass + + def get(this, key, loadfn=None): +- return loadfn and loadfn() ++ return loadfn and loadfn() + + + DEFAULT_CACHE_CAPACITY = 1000 +@@ -1327,7 +1327,7 @@ def disableCache(): + def enableCache(): + """Enable the entity cache.""" + if not isinstance(_entityCache, LRUCache): +- _entityCache = _LRUCache(size) ++ _entityCache = _LRUCache(size) + + def clearCache(): + """Clear the entity cache.""" +@@ -1365,36 +1365,36 @@ def _initializePOSTables(): + _POSNormalizationTable = {} + _POStoDictionaryTable = {} + for pos, abbreviations in ( +- (NOUN, "noun n n."), +- (VERB, "verb v v."), +- (ADJECTIVE, "adjective adj adj. a s"), +- (ADVERB, "adverb adv adv. r")): +- tokens = string.split(abbreviations) +- for token in tokens: +- _POSNormalizationTable[token] = pos +- _POSNormalizationTable[string.upper(token)] = pos ++ (NOUN, "noun n n."), ++ (VERB, "verb v v."), ++ (ADJECTIVE, "adjective adj adj. a s"), ++ (ADVERB, "adverb adv adv. 
r")): ++ tokens = string.split(abbreviations) ++ for token in tokens: ++ _POSNormalizationTable[token] = pos ++ _POSNormalizationTable[string.upper(token)] = pos + for dict in Dictionaries: +- _POSNormalizationTable[dict] = dict.pos +- _POStoDictionaryTable[dict.pos] = dict ++ _POSNormalizationTable[dict] = dict.pos ++ _POStoDictionaryTable[dict.pos] = dict + + _initializePOSTables() + + def _normalizePOS(pos): + norm = _POSNormalizationTable.get(pos) + if norm: +- return norm +- raise TypeError, `pos` + " is not a part of speech type" ++ return norm ++ raise TypeError(repr(pos) + " is not a part of speech type") + + def _dictionaryFor(pos): + pos = _normalizePOS(pos) + dict = _POStoDictionaryTable.get(pos) + if dict == None: +- raise RuntimeError, "The " + `pos` + " dictionary has not been created" ++ raise RuntimeError("The " + repr(pos) + " dictionary has not been created") + return dict + + def buildIndexFiles(): + for dict in Dictionaries: +- dict._buildIndexCacheFile() ++ dict._buildIndexCacheFile() + + + # +@@ -1404,7 +1404,7 @@ def buildIndexFiles(): + def _testKeys(): + #This is slow, so don't do it as part of the normal test procedure. 
+ for dictionary in Dictionaries: +- dictionary._testKeys() ++ dictionary._testKeys() + + def _test(reset=0): + import doctest, wordnet diff --git a/textproc/py-xhtml2pdf/Makefile b/textproc/py-xhtml2pdf/Makefile index 580113624f0..6a3dc20bf36 100644 --- a/textproc/py-xhtml2pdf/Makefile +++ b/textproc/py-xhtml2pdf/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= xhtml2pdf -PORTVERSION= 0.2.5 +PORTVERSION= 0.2.6 CATEGORIES= textproc python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -13,12 +13,12 @@ LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE.txt RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}arabic-reshaper>=2.1.0:textproc/py-arabic-reshaper@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}html5lib>=1.0:www/py-html5lib@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pdf2>=0:print/py-pdf2@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pillow>=0:graphics/py-pillow@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}html5lib>=1.0.1:www/py-html5lib@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pillow>=8.1.1:graphics/py-pillow@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pypdf3>=1.0.5:print/py-pypdf3@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}python-bidi>=0.4.2:textproc/py-python-bidi@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}reportlab>=3.3.0:print/py-reportlab@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}six>=0:devel/py-six@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}reportlab>=3.5.53:print/py-reportlab@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}svglib>=1.2.1:converters/py-svglib@${PY_FLAVOR} TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}nose>=1.0:devel/py-nose@${PY_FLAVOR} USES= python:3.6+ diff --git a/textproc/py-xhtml2pdf/distinfo b/textproc/py-xhtml2pdf/distinfo index 306f9aea0c2..6d6e688e804 100644 --- a/textproc/py-xhtml2pdf/distinfo +++ b/textproc/py-xhtml2pdf/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1602317887 -SHA256 (xhtml2pdf-0.2.5.tar.gz) = 6797e974fac66f0efbe927c1539a2756ca4fe8777eaa5882bac132fc76b39421 -SIZE (xhtml2pdf-0.2.5.tar.gz) = 100124 +TIMESTAMP = 1647264686 +SHA256 (xhtml2pdf-0.2.6.tar.gz) = 
1cb72782ac5fb67b9fcab20a37a65c6d61afde4cfd5a090ff5ba6d737c589a97 +SIZE (xhtml2pdf-0.2.6.tar.gz) = 101747 diff --git a/textproc/py-zope.structuredtext/files/patch-2to3 b/textproc/py-zope.structuredtext/files/patch-2to3 new file mode 100644 index 00000000000..495107b3a18 --- /dev/null +++ b/textproc/py-zope.structuredtext/files/patch-2to3 @@ -0,0 +1,138 @@ +--- src/zope/structuredtext/docbook.py.orig 2010-12-03 16:46:32 UTC ++++ src/zope/structuredtext/docbook.py +@@ -81,7 +81,7 @@ class DocBook: + getattr(self, self.element_types[c.getNodeName()] + )(c, level, output) + except: +- print "failed", c.getNodeName(), c ++ print("failed", c.getNodeName(), c) + output('\n') + + def description(self, doc, level, output): +--- src/zope/structuredtext/stng.py.orig 2010-12-03 16:46:32 UTC ++++ src/zope/structuredtext/stng.py +@@ -14,7 +14,7 @@ + """ + + import re +-import stdom ++from . import stdom + + __metaclass__ = type + +@@ -50,7 +50,7 @@ def display(struct): + orignal paragraphs. + """ + if struct.getColorizableTexts(): +- print '\n'.join(struct.getColorizableTexts()) ++ print('\n'.join(struct.getColorizableTexts())) + if struct.getSubparagraphs(): + for x in struct.getSubparagraphs(): + display(x) +@@ -61,7 +61,7 @@ def display2(struct): + orignal paragraphs. + """ + if struct.getNodeValue(): +- print struct.getNodeValue(),"\n" ++ print(struct.getNodeValue(),"\n") + if struct.getSubparagraphs(): + for x in struct.getSubparagraphs(): + display(x) +@@ -70,11 +70,11 @@ def findlevel(levels,indent): + """Remove all level information of levels with a greater level of + indentation. 
Then return which level should insert this paragraph + """ +- keys = levels.keys() ++ keys = list(levels.keys()) + for key in keys: + if levels[key] > indent: + del(levels[key]) +- keys = levels.keys() ++ keys = list(levels.keys()) + if not(keys): + return 0 + else: +@@ -180,8 +180,8 @@ class StructuredTextParagraph(stdom.Element): + self._src = src + self._subs = list(subs) + +- self._attributes = kw.keys() +- for k, v in kw.items(): ++ self._attributes = list(kw.keys()) ++ for k, v in list(kw.items()): + setattr(self, k, v) + + def getChildren(self): +@@ -223,7 +223,7 @@ class StructuredTextParagraph(stdom.Element): + ('%s(' % self.__class__.__name__) + +str(self._src)+', [' + ) +- for p in self._subs: a(`p`) ++ for p in self._subs: a(repr(p)) + a((' '*(self.indent or 0))+'])') + return '\n'.join(r) + +@@ -248,7 +248,7 @@ class StructuredTextDocument(StructuredTextParagraph): + def __repr__(self): + r=[]; a=r.append + a('%s([' % self.__class__.__name__) +- for p in self._subs: a(`p`+',') ++ for p in self._subs: a(repr(p)+',') + a('])') + return '\n'.join(r) + +@@ -470,8 +470,8 @@ class StructuredTextMarkup(stdom.Element): + + def __init__(self, value, **kw): + self._value = value +- self._attributes = kw.keys() +- for key, value in kw.items(): ++ self._attributes = list(kw.keys()) ++ for key, value in list(kw.items()): + setattr(self, key, value) + + def getChildren(self): +@@ -487,7 +487,7 @@ class StructuredTextMarkup(stdom.Element): + self._value=v[0] + + def __repr__(self): +- return '%s(%s)' % (self.__class__.__name__, `self._value`) ++ return '%s(%s)' % (self.__class__.__name__, repr(self._value)) + + class StructuredTextLiteral(StructuredTextMarkup): + def getColorizableTexts(self): +--- src/zope/structuredtext/tests.py.orig 2010-12-03 16:46:32 UTC ++++ src/zope/structuredtext/tests.py +@@ -45,7 +45,7 @@ class StngTests(unittest.TestCase): + doc = Document() + raw_text = readFile(regressions, f) + text = stng.structurize(raw_text) +- 
self.assert_(doc(text)) ++ self.assertTrue(doc(text)) + + def testRegressionsTests(self): + # HTML regression test +@@ -73,9 +73,9 @@ class BasicTests(unittest.TestCase): + doc = DocumentWithImages()(doc) + output = HTMLWithImages()(doc, level=1) + if not expected in output: +- print "Text: ", stxtxt.encode('utf-8') +- print "Converted:", output.encode('utf-8') +- print "Expected: ", expected.encode('utf-8') ++ print("Text: ", stxtxt.encode('utf-8')) ++ print("Converted:", output.encode('utf-8')) ++ print("Expected: ", expected.encode('utf-8')) + self.fail("'%s' not in result" % expected) + + def testUnderline(self): +@@ -279,8 +279,8 @@ class BasicTests(unittest.TestCase): + def testUnicodeContent(self): + # This fails because ST uses the default locale to get "letters" + # whereas it should use \w+ and re.U if the string is Unicode. +- self._test(u"h\xe9 **y\xe9** xx", +- u"h\xe9 y\xe9 xx") ++ self._test("h\xe9 **y\xe9** xx", ++ "h\xe9 y\xe9 xx") + + def test_suite(): + suite = unittest.TestSuite() diff --git a/textproc/py-zope.tal/files/patch-2to3 b/textproc/py-zope.tal/files/patch-2to3 new file mode 100644 index 00000000000..406cba68798 --- /dev/null +++ b/textproc/py-zope.tal/files/patch-2to3 @@ -0,0 +1,1119 @@ +--- src/zope/tal/driver.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/driver.py +@@ -43,7 +43,7 @@ import sys + import getopt + + if __name__ == "__main__": +- import setpath # Local hack to tweak sys.path etc. ++ from . import setpath # Local hack to tweak sys.path etc. 
+ + # Import local classes + import zope.tal.taldefs +@@ -104,9 +104,9 @@ ENGINES = {'test23.html': TestEngine, + } + + def usage(code, msg=''): +- print >> sys.stderr, __doc__ ++ print(__doc__, file=sys.stderr) + if msg: +- print >> sys.stderr, msg ++ print(msg, file=sys.stderr) + sys.exit(code) + + def main(): +@@ -120,7 +120,7 @@ def main(): + try: + opts, args = getopt.getopt(sys.argv[1:], "hHxlmstia", + ['help', 'html', 'xml']) +- except getopt.error, msg: ++ except getopt.error as msg: + usage(2, msg) + for opt, arg in opts: + if opt in ('-h', '--help'): +--- src/zope/tal/dummyengine.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/dummyengine.py +@@ -100,12 +100,12 @@ class DummyEngine(object): + if type == "not": + return not self.evaluate(expr) + if type == "exists": +- return self.locals.has_key(expr) or self.globals.has_key(expr) ++ return expr in self.locals or expr in self.globals + if type == "python": + try: + return eval(expr, self.globals, self.locals) + except: +- raise TALExpressionError("evaluation error in %s" % `expr`) ++ raise TALExpressionError("evaluation error in %s" % repr(expr)) + if type == "position": + # Insert the current source file name, line number, + # and column offset. 
+@@ -114,17 +114,17 @@ class DummyEngine(object): + else: + lineno, offset = None, None + return '%s (%s,%s)' % (self.source_file, lineno, offset) +- raise TALExpressionError("unrecognized expression: " + `expression`) ++ raise TALExpressionError("unrecognized expression: " + repr(expression)) + + # implementation; can be overridden + def evaluatePathOrVar(self, expr): + expr = expr.strip() +- if self.locals.has_key(expr): ++ if expr in self.locals: + return self.locals[expr] +- elif self.globals.has_key(expr): ++ elif expr in self.globals: + return self.globals[expr] + else: +- raise TALExpressionError("unknown variable: %s" % `expr`) ++ raise TALExpressionError("unknown variable: %s" % repr(expr)) + + def evaluateValue(self, expr): + return self.evaluate(expr) +@@ -134,7 +134,7 @@ class DummyEngine(object): + + def evaluateText(self, expr): + text = self.evaluate(expr) +- if isinstance(text, (str, unicode, Message)): ++ if isinstance(text, (str, Message)): + return text + if text is not None and text is not Default: + text = str(text) +@@ -159,7 +159,7 @@ class DummyEngine(object): + macro = self.macros[localName] + else: + # External macro +- import driver ++ from . import driver + program, macros = driver.compilefile(file) + macro = macros.get(localName) + if not macro: +@@ -208,7 +208,7 @@ class DummyEngine(object): + locals = self.locals.copy() + + assert lang == 'text/server-python' +- import sys, StringIO ++ import sys, io + + # Removing probable comments + if code.strip().startswith(''): +@@ -216,15 +216,15 @@ class DummyEngine(object): + + # Prepare code. 
+ lines = code.split('\n') +- lines = filter(lambda l: l.strip() != '', lines) ++ lines = [l for l in lines if l.strip() != ''] + code = '\n'.join(lines) + # This saves us from all indentation issues :) + if code.startswith(' ') or code.startswith('\t'): + code = 'if 1 == 1:\n' + code + '\n' + tmp = sys.stdout +- sys.stdout = StringIO.StringIO() ++ sys.stdout = io.StringIO() + try: +- exec code in globals, locals ++ exec(code, globals, locals) + finally: + result = sys.stdout + sys.stdout = tmp +@@ -246,7 +246,7 @@ class Iterator(object): + self.engine = engine + self.nextIndex = 0 + +- def next(self): ++ def __next__(self): + i = self.nextIndex + try: + item = self.seq[i] +@@ -264,7 +264,7 @@ class DummyTranslationDomain(object): + msgids = {} + + def appendMsgid(self, domain, data): +- if not self.msgids.has_key(domain): ++ if domain not in self.msgids: + self.msgids[domain] = [] + self.msgids[domain].append(data) + +@@ -308,7 +308,7 @@ class DummyTranslationDomain(object): + self.appendMsgid(domain, (msgid, mapping)) + + def repl(m): +- return unicode(mapping[m.group(m.lastindex).lower()]) ++ return str(mapping[m.group(m.lastindex).lower()]) + cre = re.compile(r'\$(?:([_A-Za-z][-\w]*)|\{([_A-Za-z][-\w]*)\})') + return cre.sub(repl, text) + +--- src/zope/tal/htmltalparser.py.orig 2012-02-14 09:53:32 UTC ++++ src/zope/tal/htmltalparser.py +@@ -14,7 +14,7 @@ + """Parse HTML and compile to TALInterpreter intermediate code. 
+ """ + +-from HTMLParser import HTMLParser, HTMLParseError ++from html.parser import HTMLParser, HTMLParseError + + from zope.tal.taldefs import (ZOPE_METAL_NS, ZOPE_TAL_NS, ZOPE_I18N_NS, + METALError, TALError, I18NError) +@@ -118,7 +118,7 @@ class HTMLTALParser(HTMLParser): + f.close() + try: + self.parseString(data) +- except TALError, e: ++ except TALError as e: + e.setFile(file) + raise + +@@ -141,7 +141,7 @@ class HTMLTALParser(HTMLParser): + = self.process_ns(tag, attrs) + if tag in EMPTY_HTML_TAGS and "content" in taldict: + raise TALError( +- "empty HTML tags cannot use tal:content: %s" % `tag`, ++ "empty HTML tags cannot use tal:content: %s" % repr(tag), + self.getpos()) + # Support for inline Python code. + if tag == 'script': +@@ -163,7 +163,7 @@ class HTMLTALParser(HTMLParser): + if "content" in taldict: + if tag in EMPTY_HTML_TAGS: + raise TALError( +- "empty HTML tags cannot use tal:content: %s" % `tag`, ++ "empty HTML tags cannot use tal:content: %s" % repr(tag), + self.getpos()) + self.gen.emitStartElement(tag, attrlist, taldict, metaldict, + i18ndict, self.getpos()) +--- src/zope/tal/interfaces.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/interfaces.py +@@ -61,13 +61,14 @@ class ITALExpressionEngine(Interface): + using the 'is' operator in Python. + """ + +- def setPosition((lineno, offset)): ++ def setPosition(xxx_todo_changeme): + """Inform the engine of the current position in the source file. + + This is used to allow the evaluation engine to report + execution errors so that site developers can more easily + locate the offending expression. + """ ++ (lineno, offset) = xxx_todo_changeme + + def setSourceFile(filename): + """Inform the engine of the name of the current source file. +@@ -128,12 +129,13 @@ class ITALExpressionEngine(Interface): + No constraints are imposed on the return value. 
+ """ + +- def createErrorInfo(exception, (lineno, offset)): ++ def createErrorInfo(exception, xxx_todo_changeme1): + """Returns an ITALExpressionErrorInfo object. + + The returned object is used to provide information about the + error condition for the on-error handler. + """ ++ (lineno, offset) = xxx_todo_changeme1 + + def setGlobal(name, value): + """Set a global variable. +--- src/zope/tal/ndiff.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/ndiff.py +@@ -114,6 +114,7 @@ TRACE = 0 + + # define what "junk" means + import re ++from functools import reduce + + def IS_LINE_JUNK(line, pat=re.compile(r"\s*#?\s*$").match): + return pat(line) is not None +@@ -209,7 +210,7 @@ class SequenceMatcher(object): + b = self.b + self.b2j = b2j = {} + self.b2jhas = b2jhas = b2j.has_key +- for i in xrange(len(b)): ++ for i in range(len(b)): + elt = b[i] + if b2jhas(elt): + b2j[elt].append(i) +@@ -222,7 +223,7 @@ class SequenceMatcher(object): + # saved. + isjunk, junkdict = self.isjunk, {} + if isjunk: +- for elt in b2j.keys(): ++ for elt in list(b2j.keys()): + if isjunk(elt): + junkdict[elt] = 1 # value irrelevant; it's a set + del b2j[elt] +@@ -281,7 +282,7 @@ class SequenceMatcher(object): + # junk-free match ending with a[i-1] and b[j] + j2len = {} + nothing = [] +- for i in xrange(alo, ahi): ++ for i in range(alo, ahi): + # look at all instances of a[i] in b; note that because + # b2j has no junk keys, the loop is skipped if a[i] is junk + j2lenget = j2len.get +@@ -314,8 +315,8 @@ class SequenceMatcher(object): + bestsize = bestsize + 1 + + if TRACE: +- print "get_matching_blocks", alo, ahi, blo, bhi +- print " returns", besti, bestj, bestsize ++ print("get_matching_blocks", alo, ahi, blo, bhi) ++ print(" returns", besti, bestj, bestsize) + return besti, bestj, bestsize + + def get_matching_blocks(self): +@@ -326,7 +327,7 @@ class SequenceMatcher(object): + self.__helper(0, la, 0, lb, self.matching_blocks) + self.matching_blocks.append((la, lb, 0)) + if TRACE: +- print 
'*** matching blocks', self.matching_blocks ++ print('*** matching blocks', self.matching_blocks) + return self.matching_blocks + + # builds list of matching blocks covering a[alo:ahi] and +@@ -417,8 +418,8 @@ class SequenceMatcher(object): + + # meant for dumping lines + def dump(tag, x, lo, hi): +- for i in xrange(lo, hi): +- print tag, x[i], ++ for i in range(lo, hi): ++ print(tag, x[i], end=' ') + + def plain_replace(a, alo, ahi, b, blo, bhi): + assert alo < ahi and blo < bhi +@@ -438,7 +439,7 @@ def plain_replace(a, alo, ahi, b, blo, bhi): + + def fancy_replace(a, alo, ahi, b, blo, bhi): + if TRACE: +- print '*** fancy_replace', alo, ahi, blo, bhi ++ print('*** fancy_replace', alo, ahi, blo, bhi) + dump('>', a, alo, ahi) + dump('<', b, blo, bhi) + +@@ -451,10 +452,10 @@ def fancy_replace(a, alo, ahi, b, blo, bhi): + # search for the pair that matches best without being identical + # (identical lines must be junk lines, & we don't want to synch up + # on junk -- unless we have to) +- for j in xrange(blo, bhi): ++ for j in range(blo, bhi): + bj = b[j] + cruncher.set_seq2(bj) +- for i in xrange(alo, ahi): ++ for i in range(alo, ahi): + ai = a[i] + if ai == bj: + if eqi is None: +@@ -486,7 +487,7 @@ def fancy_replace(a, alo, ahi, b, blo, bhi): + # a[best_i] very similar to b[best_j]; eqi is None iff they're not + # identical + if TRACE: +- print '*** best_ratio', best_ratio, best_i, best_j ++ print('*** best_ratio', best_ratio, best_i, best_j) + dump('>', a, best_i, best_i+1) + dump('<', b, best_j, best_j+1) + +@@ -512,11 +513,11 @@ def fancy_replace(a, alo, ahi, b, blo, bhi): + atags = atags + ' ' * la + btags = btags + ' ' * lb + else: +- raise ValueError('unknown tag ' + `tag`) ++ raise ValueError('unknown tag ' + repr(tag)) + printq(aelt, belt, atags, btags) + else: + # the synch pair is identical +- print ' ', aelt, ++ print(' ', aelt, end=' ') + + # pump out diffs from after the synch point + fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi) +@@ -537,12 
+538,12 @@ def printq(aline, bline, atags, btags): + common = min(count_leading(aline, "\t"), + count_leading(bline, "\t")) + common = min(common, count_leading(atags[:common], " ")) +- print "-", aline, ++ print("-", aline, end=' ') + if count_leading(atags, " ") < len(atags): +- print "?", "\t" * common + atags[common:] +- print "+", bline, ++ print("?", "\t" * common + atags[common:]) ++ print("+", bline, end=' ') + if count_leading(btags, " ") < len(btags): +- print "?", "\t" * common + btags[common:] ++ print("?", "\t" * common + btags[common:]) + + def count_leading(line, ch): + i, n = 0, len(line) +@@ -562,7 +563,7 @@ def fail(msg): + def fopen(fname): + try: + return open(fname, 'r') +- except IOError, detail: ++ except IOError as detail: + return fail("couldn't open " + fname + ": " + str(detail)) + + # open two files & spray the diff to stdout; return false iff a problem +@@ -586,7 +587,7 @@ def fcompare(f1name, f2name): + elif tag == 'equal': + dump(' ', a, alo, ahi) + else: +- raise ValueError('unknown tag ' + `tag`) ++ raise ValueError('unknown tag ' + repr(tag)) + + return 1 + +@@ -597,7 +598,7 @@ def main(args): + import getopt + try: + opts, args = getopt.getopt(args, "qr:") +- except getopt.error, detail: ++ except getopt.error as detail: + return fail(str(detail)) + noisy = 1 + qseen = rseen = 0 +@@ -621,8 +622,8 @@ def main(args): + return fail("need 2 filename args") + f1name, f2name = args + if noisy: +- print '-:', f1name +- print '+:', f2name ++ print('-:', f1name) ++ print('+:', f2name) + return fcompare(f1name, f2name) + + def restore(which): +@@ -631,7 +632,7 @@ def restore(which): + prefixes = (" ", tag) + for line in sys.stdin.readlines(): + if line[:2] in prefixes: +- print line[2:], ++ print(line[2:], end=' ') + + if __name__ == '__main__': + import sys +--- src/zope/tal/runtest.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/runtest.py +@@ -19,24 +19,24 @@ import os + import sys + import traceback + +-from cStringIO import StringIO 
++from io import StringIO + + if __name__ == "__main__": +- import setpath # Local hack to tweak sys.path etc. ++ from . import setpath # Local hack to tweak sys.path etc. + + import zope.tal.driver + import zope.tal.tests.utils + + def showdiff(a, b): +- import ndiff ++ from . import ndiff + cruncher = ndiff.SequenceMatcher(ndiff.IS_LINE_JUNK, a, b) + for tag, alo, ahi, blo, bhi in cruncher.get_opcodes(): + if tag == "equal": + continue +- print nicerange(alo, ahi) + tag[0] + nicerange(blo, bhi) ++ print(nicerange(alo, ahi) + tag[0] + nicerange(blo, bhi)) + ndiff.dump('<', a, alo, ahi) + if a and b: +- print '---' ++ print('---') + ndiff.dump('>', b, blo, bhi) + + def nicerange(lo, hi): +@@ -80,10 +80,10 @@ def main(): + if arg.find("_sa") >= 0 and "-a" not in opts: + locopts.append("-a") + if not unittesting: +- print arg, ++ print(arg, end=' ') + sys.stdout.flush() + if zope.tal.tests.utils.skipxml and arg.endswith(".xml"): +- print "SKIPPED (XML parser not available)" ++ print("SKIPPED (XML parser not available)") + continue + save = sys.stdout, sys.argv + try: +@@ -98,13 +98,13 @@ def main(): + except: + errors = 1 + if quiet: +- print sys.exc_type ++ print(sys.exc_info()[0]) + sys.stdout.flush() + else: + if unittesting: +- print ++ print() + else: +- print "Failed:" ++ print("Failed:") + sys.stdout.flush() + traceback.print_exc() + continue +@@ -116,7 +116,7 @@ def main(): + f = open(outfile) + except IOError: + expected = None +- print "(missing file %s)" % outfile, ++ print("(missing file %s)" % outfile, end=' ') + else: + expected = f.readlines() + f.close() +@@ -127,12 +127,12 @@ def main(): + actual = readlines(stdout) + if actual == expected: + if not unittesting: +- print "OK" ++ print("OK") + else: + if unittesting: +- print ++ print() + else: +- print "not OK" ++ print("not OK") + errors = 1 + if not quiet and expected is not None: + showdiff(expected, actual) +--- src/zope/tal/talgenerator.py.orig 2012-02-14 07:21:28 UTC ++++ 
src/zope/tal/talgenerator.py +@@ -69,7 +69,7 @@ class TALGenerator(object): + output = [] + collect = [] + cursor = 0 +- for cursor in xrange(len(program)+1): ++ for cursor in range(len(program)+1): + try: + item = program[cursor] + except IndexError: +@@ -197,8 +197,8 @@ class TALGenerator(object): + def compileExpression(self, expr): + try: + return self.expressionCompiler.compile(expr) +- except self.CompilerError, err: +- raise TALError('%s in expression %s' % (err.args[0], `expr`), ++ except self.CompilerError as err: ++ raise TALError('%s in expression %s' % (err.args[0], repr(expr)), + self.position) + + def pushProgram(self): +@@ -259,7 +259,7 @@ class TALGenerator(object): + m = re.match( + r"(?s)\s*(?:(global|local)\s+)?(%s)\s+(.*)\Z" % NAME_RE, part) + if not m: +- raise TALError("invalid define syntax: " + `part`, ++ raise TALError("invalid define syntax: " + repr(part), + self.position) + scope, name, expr = m.group(1, 2, 3) + scope = scope or "local" +@@ -293,7 +293,7 @@ class TALGenerator(object): + def emitRepeat(self, arg): + m = re.match("(?s)\s*(%s)\s+(.*)\Z" % NAME_RE, arg) + if not m: +- raise TALError("invalid repeat syntax: " + `arg`, ++ raise TALError("invalid repeat syntax: " + repr(arg), + self.position) + name, expr = m.group(1, 2) + cexpr = self.compileExpression(expr) +@@ -346,11 +346,11 @@ class TALGenerator(object): + def emitDefineMacro(self, macroName): + program = self.popProgram() + macroName = macroName.strip() +- if self.macros.has_key(macroName): +- raise METALError("duplicate macro definition: %s" % `macroName`, ++ if macroName in self.macros: ++ raise METALError("duplicate macro definition: %s" % repr(macroName), + self.position) + if not re.match('%s$' % NAME_RE, macroName): +- raise METALError("invalid macro name: %s" % `macroName`, ++ raise METALError("invalid macro name: %s" % repr(macroName), + self.position) + self.macros[macroName] = program + self.inMacroDef = self.inMacroDef - 1 +@@ -374,18 +374,18 @@ class 
TALGenerator(object): + program = self.popProgram() + slotName = slotName.strip() + if not re.match('%s$' % NAME_RE, slotName): +- raise METALError("invalid slot name: %s" % `slotName`, ++ raise METALError("invalid slot name: %s" % repr(slotName), + self.position) + self.emit("defineSlot", slotName, program) + + def emitFillSlot(self, slotName): + program = self.popProgram() + slotName = slotName.strip() +- if self.slots.has_key(slotName): +- raise METALError("duplicate fill-slot name: %s" % `slotName`, ++ if slotName in self.slots: ++ raise METALError("duplicate fill-slot name: %s" % repr(slotName), + self.position) + if not re.match('%s$' % NAME_RE, slotName): +- raise METALError("invalid slot name: %s" % `slotName`, ++ raise METALError("invalid slot name: %s" % repr(slotName), + self.position) + self.slots[slotName] = program + self.inMacroUse = 1 +@@ -449,13 +449,13 @@ class TALGenerator(object): + newlist = [] + for item in attrlist: + key = item[0] +- if repldict.has_key(key): ++ if key in repldict: + expr, xlat, msgid = repldict[key] + item = item[:2] + ("replace", expr, xlat, msgid) + del repldict[key] + newlist.append(item) + # Add dynamic-only attributes +- for key, (expr, xlat, msgid) in repldict.items(): ++ for key, (expr, xlat, msgid) in list(repldict.items()): + newlist.append((key, None, "insert", expr, xlat, msgid)) + return newlist + +@@ -482,25 +482,25 @@ class TALGenerator(object): + taldict["content"] = taldict.pop("replace") + replaced = True + +- for key, value in taldict.items(): ++ for key, value in list(taldict.items()): + if key not in taldefs.KNOWN_TAL_ATTRIBUTES: +- raise TALError("bad TAL attribute: " + `key`, position) ++ raise TALError("bad TAL attribute: " + repr(key), position) + if not (value or key == 'omit-tag'): + raise TALError("missing value for TAL attribute: " + +- `key`, position) +- for key, value in metaldict.items(): ++ repr(key), position) ++ for key, value in list(metaldict.items()): + if key not in 
taldefs.KNOWN_METAL_ATTRIBUTES: +- raise METALError("bad METAL attribute: " + `key`, ++ raise METALError("bad METAL attribute: " + repr(key), + position) + if not value: + raise TALError("missing value for METAL attribute: " + +- `key`, position) +- for key, value in i18ndict.items(): ++ repr(key), position) ++ for key, value in list(i18ndict.items()): + if key not in taldefs.KNOWN_I18N_ATTRIBUTES: +- raise I18NError("bad i18n attribute: " + `key`, position) ++ raise I18NError("bad i18n attribute: " + repr(key), position) + if not value and key in ("attributes", "data", "id"): + raise I18NError("missing value for i18n attribute: " + +- `key`, position) ++ repr(key), position) + + todo = {} + defineMacro = metaldict.get("define-macro") +@@ -681,7 +681,7 @@ class TALGenerator(object): + i18nattrs = {} + # Convert repldict's name-->expr mapping to a + # name-->(compiled_expr, translate) mapping +- for key, value in repldict.items(): ++ for key, value in list(repldict.items()): + if i18nattrs.get(key, None): + raise I18NError( + "attribute [%s] cannot both be part of tal:attributes" +--- src/zope/tal/talgettext.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/talgettext.py +@@ -62,16 +62,16 @@ NLSTR = '"\n"' + + def usage(code, msg=''): + # Python 2.1 required +- print >> sys.stderr, __doc__ ++ print(__doc__, file=sys.stderr) + if msg: +- print >> sys.stderr, msg ++ print(msg, file=sys.stderr) + sys.exit(code) + + + class POTALInterpreter(TALInterpreter): + def translate(self, msgid, default=None, i18ndict=None, obj=None): + if default is None: +- default = getattr(msgid, 'default', unicode(msgid)) ++ default = getattr(msgid, 'default', str(msgid)) + # If no i18n dict exists yet, create one. 
+ if i18ndict is None: + i18ndict = {} +@@ -126,15 +126,15 @@ class POEngine(DummyEngine): + if msgid not in domain: + domain[msgid] = [] + else: +- msgids = domain.keys() ++ msgids = list(domain.keys()) + idx = msgids.index(msgid) + existing_msgid = msgids[idx] + if msgid.default != existing_msgid.default: + references = '\n'.join([location[0]+':'+str(location[1]) for location in domain[msgid]]) +- print >> sys.stderr, (u"Warning: msgid '%s' in %s already exists " \ ++ print(("Warning: msgid '%s' in %s already exists " \ + "with a different default (bad: %s, should be: %s)\n" \ + "The references for the existent value are:\n%s\n" % \ +- (msgid, self.file+':'+str(position), msgid.default.encode('utf-8'), existing_msgid.default.encode('utf-8'), references)).encode('utf-8') ++ (msgid, self.file+':'+str(position), msgid.default.encode('utf-8'), existing_msgid.default.encode('utf-8'), references)).encode('utf-8'), file=sys.stderr) + domain[msgid].append((self.file, position)) + return 'x' + +@@ -170,8 +170,8 @@ class UpdatePOEngine(POEngine): + + try: + lines = open(self._filename).readlines() +- except IOError, msg: +- print >> sys.stderr, msg ++ except IOError as msg: ++ print(msg, file=sys.stderr) + sys.exit(1) + + section = None +@@ -213,9 +213,9 @@ class UpdatePOEngine(POEngine): + elif section == STR: + msgstr += '%s\n' % l + else: +- print >> sys.stderr, 'Syntax error on %s:%d' % (infile, lno), \ +- 'before:' +- print >> sys.stderr, l ++ print('Syntax error on %s:%d' % (infile, lno), \ ++ 'before:', file=sys.stderr) ++ print(l, file=sys.stderr) + sys.exit(1) + # Add last entry + if section == STR: +@@ -243,7 +243,7 @@ def main(): + sys.argv[1:], + 'ho:u:', + ['help', 'output=', 'update=']) +- except getopt.error, msg: ++ except getopt.error as msg: + usage(1, msg) + + outfile = None +@@ -261,7 +261,7 @@ def main(): + engine = UpdatePOEngine(filename=arg) + + if not args: +- print 'nothing to do' ++ print('nothing to do') + return + + # We don't care about the 
rendered output of the .pt file +@@ -284,7 +284,7 @@ def main(): + POTALInterpreter(program, macros, engine, stream=Devnull(), + metal=False)() + except: # Hee hee, I love bare excepts! +- print 'There was an error processing', filename ++ print('There was an error processing', filename) + traceback.print_exc() + + # Now output the keys in the engine. Write them to a file if --output or +@@ -295,7 +295,7 @@ def main(): + outfile = file(outfile, update_mode and "a" or "w") + + catalog = {} +- for domain in engine.catalog.keys(): ++ for domain in list(engine.catalog.keys()): + catalog.update(engine.catalog[domain]) + + messages = catalog.copy() +@@ -304,10 +304,10 @@ def main(): + except AttributeError: + pass + if '' not in messages: +- print >> outfile, pot_header % {'time': time.ctime(), +- 'version': __version__} ++ print(pot_header % {'time': time.ctime(), ++ 'version': __version__}, file=outfile) + +- msgids = catalog.keys() ++ msgids = list(catalog.keys()) + # TODO: You should not sort by msgid, but by filename and position. 
(SR) + msgids.sort() + for msgid in msgids: +--- src/zope/tal/talinterpreter.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/talinterpreter.py +@@ -29,7 +29,7 @@ from zope.tal.translationcontext import TranslationCon + # Avoid constructing this tuple over and over + I18nMessageTypes = (Message,) + +-TypesToTranslate = I18nMessageTypes + (str, unicode) ++TypesToTranslate = I18nMessageTypes + (str, str) + + BOOLEAN_HTML_ATTRS = frozenset([ + # List of Boolean attributes in HTML that should be rendered in +@@ -251,7 +251,7 @@ class TALInterpreter(object): + def pushMacro(self, macroName, slots, definingName, extending): + if len(self.macroStack) >= self.stackLimit: + raise METALError("macro nesting limit (%d) exceeded " +- "by %s" % (self.stackLimit, `macroName`)) ++ "by %s" % (self.stackLimit, repr(macroName))) + self.macroStack.append( + MacroStackItem((macroName, slots, definingName, extending, + True, self.i18nContext))) +@@ -371,12 +371,13 @@ class TALInterpreter(object): + self.do_startTag(stuff, self.endsep, self.endlen) + bytecode_handlers["startEndTag"] = do_startEndTag + +- def do_startTag(self, (name, attrList), ++ def do_startTag(self, xxx_todo_changeme, + end=">", endlen=1, _len=len): + # The bytecode generator does not cause calls to this method + # for start tags with no attributes; those are optimized down + # to rawtext events. Hence, there is no special "fast path" + # for that case. 
++ (name, attrList) = xxx_todo_changeme + self._currentTag = name + L = ["<", name] + append = L.append +@@ -507,8 +508,9 @@ class TALInterpreter(object): + self.restoreOutputState(state) + self.interpret(program) + +- def do_optTag(self, (name, cexpr, tag_ns, isend, start, program), ++ def do_optTag(self, xxx_todo_changeme1, + omit=0): ++ (name, cexpr, tag_ns, isend, start, program) = xxx_todo_changeme1 + if tag_ns and not self.showtal: + return self.no_tag(start, program) + +@@ -528,7 +530,8 @@ class TALInterpreter(object): + self.do_optTag(stuff) + bytecode_handlers["optTag"] = do_optTag + +- def do_rawtextBeginScope(self, (s, col, position, closeprev, dict)): ++ def do_rawtextBeginScope(self, xxx_todo_changeme2): ++ (s, col, position, closeprev, dict) = xxx_todo_changeme2 + self._stream_write(s) + self.col = col + self.do_setPosition(position) +@@ -540,7 +543,8 @@ class TALInterpreter(object): + self.engine.beginScope() + self.scopeLevel = self.scopeLevel + 1 + +- def do_rawtextBeginScope_tal(self, (s, col, position, closeprev, dict)): ++ def do_rawtextBeginScope_tal(self, xxx_todo_changeme3): ++ (s, col, position, closeprev, dict) = xxx_todo_changeme3 + self._stream_write(s) + self.col = col + engine = self.engine +@@ -574,11 +578,13 @@ class TALInterpreter(object): + def do_setLocal(self, notused): + pass + +- def do_setLocal_tal(self, (name, expr)): ++ def do_setLocal_tal(self, xxx_todo_changeme4): ++ (name, expr) = xxx_todo_changeme4 + self.engine.setLocal(name, self.engine.evaluateValue(expr)) + bytecode_handlers["setLocal"] = do_setLocal + +- def do_setGlobal_tal(self, (name, expr)): ++ def do_setGlobal_tal(self, xxx_todo_changeme5): ++ (name, expr) = xxx_todo_changeme5 + self.engine.setGlobal(name, self.engine.evaluateValue(expr)) + bytecode_handlers["setGlobal"] = do_setLocal + +@@ -670,7 +676,7 @@ class TALInterpreter(object): + value = self.translate(value) + + if not structure: +- value = cgi.escape(unicode(value)) ++ value = cgi.escape(str(value)) + 
+ # Either the i18n:name tag is nested inside an i18n:translate in which + # case the last item on the stack has the i18n dictionary and string +@@ -733,7 +739,8 @@ class TALInterpreter(object): + bytecode_handlers["insertStructure"] = do_insertStructure + bytecode_handlers["insertI18nStructure"] = do_insertStructure + +- def do_insertStructure_tal(self, (expr, repldict, block)): ++ def do_insertStructure_tal(self, xxx_todo_changeme6): ++ (expr, repldict, block) = xxx_todo_changeme6 + structure = self.engine.evaluateStructure(expr) + if structure is None: + return +@@ -743,7 +750,7 @@ class TALInterpreter(object): + if isinstance(structure, I18nMessageTypes): + text = self.translate(structure) + else: +- text = unicode(structure) ++ text = str(structure) + if not (repldict or self.strictinsert): + # Take a shortcut, no error checking + self.stream_write(text) +@@ -753,15 +760,16 @@ class TALInterpreter(object): + else: + self.insertXMLStructure(text, repldict) + +- def do_insertI18nStructure_tal(self, (expr, repldict, block)): ++ def do_insertI18nStructure_tal(self, xxx_todo_changeme7): + # TODO: Code duplication is BAD, we need to fix it later ++ (expr, repldict, block) = xxx_todo_changeme7 + structure = self.engine.evaluateStructure(expr) + if structure is not None: + if structure is self.Default: + self.interpret(block) + else: + if not isinstance(structure, TypesToTranslate): +- structure = unicode(structure) ++ structure = str(structure) + text = self.translate(structure) + if not (repldict or self.strictinsert): + # Take a shortcut, no error checking +@@ -807,19 +815,21 @@ class TALInterpreter(object): + self._stream_write(output) + bytecode_handlers["evaluateCode"] = do_evaluateCode + +- def do_loop(self, (name, expr, block)): ++ def do_loop(self, xxx_todo_changeme8): ++ (name, expr, block) = xxx_todo_changeme8 + self.interpret(block) + +- def do_loop_tal(self, (name, expr, block)): ++ def do_loop_tal(self, xxx_todo_changeme9): ++ (name, expr, block) = 
xxx_todo_changeme9 + iterator = self.engine.setRepeat(name, expr) +- while iterator.next(): ++ while next(iterator): + self.interpret(block) + bytecode_handlers["loop"] = do_loop + + def translate(self, msgid, default=None, i18ndict=None, + obj=None, domain=None): + if default is None: +- default = getattr(msgid, 'default', unicode(msgid)) ++ default = getattr(msgid, 'default', str(msgid)) + if i18ndict is None: + i18ndict = {} + if domain is None: +@@ -832,30 +842,35 @@ class TALInterpreter(object): + return self.engine.translate(msgid, self.i18nContext.domain, + i18ndict, default=default) + +- def do_rawtextColumn(self, (s, col)): ++ def do_rawtextColumn(self, xxx_todo_changeme10): ++ (s, col) = xxx_todo_changeme10 + self._stream_write(s) + self.col = col + bytecode_handlers["rawtextColumn"] = do_rawtextColumn + +- def do_rawtextOffset(self, (s, offset)): ++ def do_rawtextOffset(self, xxx_todo_changeme11): ++ (s, offset) = xxx_todo_changeme11 + self._stream_write(s) + self.col = self.col + offset + bytecode_handlers["rawtextOffset"] = do_rawtextOffset + +- def do_condition(self, (condition, block)): ++ def do_condition(self, xxx_todo_changeme12): ++ (condition, block) = xxx_todo_changeme12 + if not self.tal or self.engine.evaluateBoolean(condition): + self.interpret(block) + bytecode_handlers["condition"] = do_condition + +- def do_defineMacro(self, (macroName, macro)): ++ def do_defineMacro(self, xxx_todo_changeme13): ++ (macroName, macro) = xxx_todo_changeme13 + wasInUse = self.inUseDirective + self.inUseDirective = False + self.interpret(macro) + self.inUseDirective = wasInUse + bytecode_handlers["defineMacro"] = do_defineMacro + +- def do_useMacro(self, (macroName, macroExpr, compiledSlots, block), ++ def do_useMacro(self, xxx_todo_changeme14, + definingName=None, extending=False): ++ (macroName, macroExpr, compiledSlots, block) = xxx_todo_changeme14 + if not self.metal: + self.interpret(block) + return +@@ -865,12 +880,12 @@ class TALInterpreter(object): + 
else: + if not isCurrentVersion(macro): + raise METALError("macro %s has incompatible version %s" % +- (`macroName`, `getProgramVersion(macro)`), ++ (repr(macroName), repr(getProgramVersion(macro))), + self.position) + mode = getProgramMode(macro) + if mode != (self.html and "html" or "xml"): + raise METALError("macro %s has incompatible mode %s" % +- (`macroName`, `mode`), self.position) ++ (repr(macroName), repr(mode)), self.position) + self.pushMacro(macroName, compiledSlots, definingName, extending) + + # We want 'macroname' name to be always available as a variable +@@ -891,23 +906,26 @@ class TALInterpreter(object): + self.engine.setLocal('macroname', outer) + bytecode_handlers["useMacro"] = do_useMacro + +- def do_extendMacro(self, (macroName, macroExpr, compiledSlots, block, +- definingName)): ++ def do_extendMacro(self, xxx_todo_changeme15): + # extendMacro results from a combination of define-macro and + # use-macro. definingName has the value of the + # metal:define-macro attribute. ++ (macroName, macroExpr, compiledSlots, block, ++ definingName) = xxx_todo_changeme15 + extending = self.metal and self.inUseDirective + self.do_useMacro((macroName, macroExpr, compiledSlots, block), + definingName, extending) + bytecode_handlers["extendMacro"] = do_extendMacro + +- def do_fillSlot(self, (slotName, block)): ++ def do_fillSlot(self, xxx_todo_changeme16): + # This is only executed if the enclosing 'use-macro' evaluates + # to 'default'. 
++ (slotName, block) = xxx_todo_changeme16 + self.interpret(block) + bytecode_handlers["fillSlot"] = do_fillSlot + +- def do_defineSlot(self, (slotName, block)): ++ def do_defineSlot(self, xxx_todo_changeme17): ++ (slotName, block) = xxx_todo_changeme17 + if not self.metal: + self.interpret(block) + return +@@ -949,10 +967,12 @@ class TALInterpreter(object): + self.interpret(block) + bytecode_handlers["defineSlot"] = do_defineSlot + +- def do_onError(self, (block, handler)): ++ def do_onError(self, xxx_todo_changeme18): ++ (block, handler) = xxx_todo_changeme18 + self.interpret(block) + +- def do_onError_tal(self, (block, handler)): ++ def do_onError_tal(self, xxx_todo_changeme19): ++ (block, handler) = xxx_todo_changeme19 + state = self.saveState() + self.stream = stream = self.StringIO() + self._stream_write = stream.write +@@ -1004,7 +1024,7 @@ class FasterStringIO(list): + self.append(value) + + def getvalue(self): +- return u''.join(self) ++ return ''.join(self) + + + def _write_ValueError(s): +--- src/zope/tal/tests/markbench.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/tests/markbench.py +@@ -25,7 +25,7 @@ import getopt + import sys + import time + +-from cStringIO import StringIO ++from io import StringIO + + #from zope.documenttemplate.dt_html import HTMLFile + +@@ -108,14 +108,14 @@ def compare(n, count, profiler=None, verbose=1): + t1 = int(time_zpt(tal_fn % n, count) * 1000 + 0.5) + t2 = int(time_tal(tal_fn % n, count) * 1000 + 0.5) + t3 = 'n/a' # int(time_dtml(dtml_fn % n, count) * 1000 + 0.5) +- print '%.2d: %10s %10s %10s' % (n, t1, t2, t3) ++ print('%.2d: %10s %10s %10s' % (n, t1, t2, t3)) + if profiler: + profile_tal(tal_fn % n, count, profiler) + + def main(count, profiler=None, verbose=1): + n = 1 + if verbose: +- print '##: %10s %10s %10s' % ('ZPT', 'TAL', 'DTML') ++ print('##: %10s %10s %10s' % ('ZPT', 'TAL', 'DTML')) + while os.path.isfile(tal_fn % n) and os.path.isfile(dtml_fn % n): + compare(n, count, profiler, verbose) + n = n + 1 +@@ 
-127,7 +127,7 @@ def get_signal_name(sig): + return name + return None + +-data = {'x':'X', 'r2': range(2), 'r8': range(8), 'r64': range(64)} ++data = {'x':'X', 'r2': list(range(2)), 'r8': list(range(8)), 'r64': list(range(64))} + for i in range(10): + data['x%s' % i] = 'X%s' % i + +@@ -160,9 +160,9 @@ if __name__ == "__main__": + sys.exit(rc) + elif rc < 0: + sig = -rc +- print >>sys.stderr, ( ++ print(( + "Process exited, signal %d (%s)." +- % (sig, get_signal_name(sig) or "")) ++ % (sig, get_signal_name(sig) or "")), file=sys.stderr) + sys.exit(1) + os.chdir(pwd) + +@@ -180,6 +180,6 @@ if __name__ == "__main__": + p.sort_stats('time', 'calls') + try: + p.print_stats(20) +- except IOError, e: ++ except IOError as e: + if e.errno != errno.EPIPE: + raise +--- src/zope/tal/tests/test_files.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/tests/test_files.py +@@ -53,7 +53,7 @@ class FileTestCase(unittest.TestCase): + zope.tal.runtest.main() + finally: + os.chdir(pwd) +- except SystemExit, what: ++ except SystemExit as what: + if what.code: + self.fail("output for %s didn't match" % self.__file) + +--- src/zope/tal/tests/utils.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/tests/utils.py +@@ -50,8 +50,8 @@ def run_suite(suite, outf=None, errf=None): + ## print + newerrs = len(result.errors) + len(result.failures) + if newerrs: +- print "'Errors' indicate exceptions other than AssertionError." 
+- print "'Failures' indicate AssertionError" ++ print("'Errors' indicate exceptions other than AssertionError.") ++ print("'Failures' indicate AssertionError") + if errf is None: + errf = sys.stderr + errf.write("%d errors, %d failures\n" +--- src/zope/tal/timer.py.orig 2012-02-14 07:21:28 UTC ++++ src/zope/tal/timer.py +@@ -18,7 +18,7 @@ import getopt + import sys + import time + +-from cStringIO import StringIO ++from io import StringIO + + from zope.tal.driver import FILE, compilefile, interpretit + +@@ -27,8 +27,8 @@ def main(): + count = 10 + try: + opts, args = getopt.getopt(sys.argv[1:], "n:") +- except getopt.error, msg: +- print msg ++ except getopt.error as msg: ++ print(msg) + sys.exit(2) + for o, a in opts: + if o == "-n": +@@ -36,7 +36,7 @@ def main(): + if not args: + args = [FILE] + for file in args: +- print file ++ print(file) + dummyfile = StringIO() + it = timefunc(count, compilefile, file) + timefunc(count, interpretit, it, None, dummyfile) +@@ -46,7 +46,7 @@ def timefunc(count, func, *args): + sys.stderr.flush() + t0 = time.clock() + for i in range(count): +- result = apply(func, args) ++ result = func(*args) + t1 = time.clock() + sys.stderr.write("%6.3f secs for %d calls, i.e. %4.0f msecs per call\n" + % ((t1-t0), count, 1000*(t1-t0)/count)) diff --git a/textproc/py-zpt/files/patch-2to3 b/textproc/py-zpt/files/patch-2to3 new file mode 100644 index 00000000000..bb5a22519a8 --- /dev/null +++ b/textproc/py-zpt/files/patch-2to3 @@ -0,0 +1,1071 @@ +--- Expressions.py.orig 2003-03-01 20:25:13 UTC ++++ Expressions.py +@@ -20,14 +20,14 @@ for Python expressions, string literals, and paths. 
+ __version__='$Revision: 1.2 $'[11:-2] + + import re, sys +-from TALES import Engine, CompilerError, _valid_name, NAME_RE, \ ++from .TALES import Engine, CompilerError, _valid_name, NAME_RE, \ + Undefined, Default, _parse_expr + + _engine = None + def getEngine(): + global _engine + if _engine is None: +- from PathIterator import Iterator ++ from .PathIterator import Iterator + _engine = Engine(Iterator) + installHandlers(_engine) + return _engine +@@ -42,7 +42,7 @@ def installHandlers(engine): + reg('not', NotExpr) + reg('defer', DeferExpr) + +-from PythonExpr import getSecurityManager, PythonExpr ++from .PythonExpr import getSecurityManager, PythonExpr + try: + from zExceptions import Unauthorized + except ImportError: +@@ -78,7 +78,7 @@ def render(ob, ns): + ob = call_with_ns(ob, ns, 2) + else: + ob = ob() +- except AttributeError, n: ++ except AttributeError as n: + if str(n) != '__call__': + raise + return ob +@@ -88,7 +88,7 @@ class SubPathExpr: + self._path = path = path.strip().split('/') + self._base = base = path.pop(0) + if not _valid_name(base): +- raise CompilerError, 'Invalid variable name "%s"' % base ++ raise CompilerError('Invalid variable name "%s"' % base) + # Parse path + self._dp = dp = [] + for i in range(len(path)): +@@ -178,10 +178,10 @@ class PathExpr: + return self._eval(econtext) + + def __str__(self): +- return '%s expression %s' % (self._name, `self._s`) ++ return '%s expression %s' % (self._name, repr(self._s)) + + def __repr__(self): +- return '%s:%s' % (self._name, `self._s`) ++ return '%s:%s' % (self._name, repr(self._s)) + + + _interp = re.compile(r'\$(%(n)s)|\${(%(n)s(?:/[^}]*)*)}' % {'n': NAME_RE}) +@@ -205,8 +205,8 @@ class StringExpr: + exp = exp[m.end():] + m = _interp.search(exp) + if '$' in exp: +- raise CompilerError, ( +- '$ must be doubled or followed by a simple path') ++ raise CompilerError(( ++ '$ must be doubled or followed by a simple path')) + parts.append(exp) + expr = ''.join(parts) + self._expr = expr +@@ 
-222,10 +222,10 @@ class StringExpr: + return self._expr % tuple(vvals) + + def __str__(self): +- return 'string expression %s' % `self._s` ++ return 'string expression %s' % repr(self._s) + + def __repr__(self): +- return 'string:%s' % `self._s` ++ return 'string:%s' % repr(self._s) + + class NotExpr: + def __init__(self, name, expr, compiler): +@@ -239,7 +239,7 @@ class NotExpr: + return (not econtext.evaluateBoolean(self._c)) and 1 or 0 + + def __repr__(self): +- return 'not:%s' % `self._s` ++ return 'not:%s' % repr(self._s) + + class DeferWrapper: + def __init__(self, expr, econtext): +@@ -261,7 +261,7 @@ class DeferExpr: + return DeferWrapper(self._c, econtext) + + def __repr__(self): +- return 'defer:%s' % `self._s` ++ return 'defer:%s' % repr(self._s) + + class TraversalError: + def __init__(self, path, name): +@@ -295,7 +295,7 @@ def restrictedTraverse(self, path, securityManager, + # the object has the attribute "__getitem__" + # instead of blindly catching exceptions. + o = object[name] +- except AttributeError, exc: ++ except AttributeError as exc: + if str(exc).find('__getitem__') >= 0: + # The object does not support the item interface. + # Try to re-raise the original attribute error. +@@ -303,7 +303,7 @@ def restrictedTraverse(self, path, securityManager, + # ExtensionClass instances. + get(object, name) + raise +- except TypeError, exc: ++ except TypeError as exc: + if str(exc).find('unsubscriptable') >= 0: + # The object does not support the item interface. + # Try to re-raise the original attribute error. 
+--- MultiMapping.py.orig 2003-03-01 14:59:53 UTC ++++ MultiMapping.py +@@ -1,20 +1,21 @@ + import operator ++from functools import reduce + + class MultiMapping: + def __init__(self, *stores): + self.stores = list(stores) + def __getitem__(self, key): + for store in self.stores: +- if store.has_key(key): ++ if key in store: + return store[key] +- raise KeyError, key ++ raise KeyError(key) + _marker = [] + def get(self, key, default=_marker): + for store in self.stores: +- if store.has_key(key): ++ if key in store: + return store[key] + if default is self._marker: +- raise KeyError, key ++ raise KeyError(key) + return default + def __len__(self): + return reduce(operator.add, [len(x) for x in stores], 0) +@@ -25,5 +26,5 @@ class MultiMapping: + def items(self): + l = [] + for store in self.stores: +- l = l + store.items() ++ l = l + list(store.items()) + return l +--- PythonExpr.py.orig 2003-03-01 20:25:13 UTC ++++ PythonExpr.py +@@ -16,7 +16,7 @@ + + __version__='$Revision: 1.2 $'[11:-2] + +-from TALES import CompilerError ++from .TALES import CompilerError + from sys import exc_info + + class getSecurityManager: +@@ -30,16 +30,16 @@ class PythonExpr: + self.expr = expr = expr.strip().replace('\n', ' ') + try: + d = {} +- exec 'def f():\n return %s\n' % expr.strip() in d ++ exec('def f():\n return %s\n' % expr.strip(), d) + self._f = d['f'] + except: +- raise CompilerError, ('Python expression error:\n' +- '%s: %s') % exc_info()[:2] ++ raise CompilerError(('Python expression error:\n' ++ '%s: %s') % exc_info()[:2]) + self._get_used_names() + + def _get_used_names(self): + self._f_varnames = vnames = [] +- for vname in self._f.func_code.co_names: ++ for vname in self._f.__code__.co_names: + if vname[0] not in '$_': + vnames.append(vname) + +@@ -62,7 +62,7 @@ class PythonExpr: + def __call__(self, econtext): + __traceback_info__ = self.expr + f = self._f +- f.func_globals.update(self._bind_used_names(econtext)) ++ 
f.__globals__.update(self._bind_used_names(econtext)) + return f() + + def __str__(self): +--- TALES.py.orig 2003-03-01 20:25:13 UTC ++++ TALES.py +@@ -18,9 +18,9 @@ An implementation of a generic TALES engine + __version__='$Revision: 1.2 $'[11:-2] + + import re, sys, ZTUtils +-from MultiMapping import MultiMapping +-from TAL.ustr import ustr +-from GlobalTranslationService import getGlobalTranslationService ++from .MultiMapping import MultiMapping ++from .TAL.ustr import ustr ++from .GlobalTranslationService import getGlobalTranslationService + + StringType = type('') + +@@ -65,7 +65,7 @@ class Iterator(ZTUtils.Iterator): + self.name = name + self._context = context + +- def next(self): ++ def __next__(self): + if ZTUtils.Iterator.next(self): + self._context.setLocal(self.name, self.item) + return 1 +@@ -104,10 +104,10 @@ class Engine: + + def registerType(self, name, handler): + if not _valid_name(name): +- raise RegistrationError, 'Invalid Expression type "%s".' % name ++ raise RegistrationError('Invalid Expression type "%s".' % name) + types = self.types +- if types.has_key(name): +- raise RegistrationError, ( ++ if name in types: ++ raise RegistrationError( + 'Multiple registrations for Expression type "%s".' % + name) + types[name] = handler +@@ -126,7 +126,7 @@ class Engine: + try: + handler = self.types[type] + except KeyError: +- raise CompilerError, ( ++ raise CompilerError( + 'Unrecognized expression type "%s".' % type) + return handler(type, expr, self) + +@@ -282,4 +282,4 @@ class SimpleExpr: + def __call__(self, econtext): + return self._name, self._expr + def __repr__(self): +- return '' % (self._name, `self._expr`) ++ return '' % (self._name, repr(self._expr)) +--- TAL/HTMLParser.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/HTMLParser.py +@@ -8,7 +8,7 @@ + # and CDATA (character data -- only end tags are special). + + +-import markupbase ++from . 
import markupbase + import re + + # Regular expressions used for parsing +@@ -285,7 +285,7 @@ class HTMLParser(markupbase.ParserBase): + else: + offset = offset + len(self.__starttag_text) + self.error("junk characters in start tag: %s" +- % `rawdata[k:endpos][:20]`) ++ % repr(rawdata[k:endpos][:20])) + if end[-2:] == '/>': + # XHTML-style empty tag: + self.handle_startendtag(tag, attrs) +@@ -337,7 +337,7 @@ class HTMLParser(markupbase.ParserBase): + j = match.end() + match = endtagfind.match(rawdata, i) # + if not match: +- self.error("bad end tag: %s" % `rawdata[i:j]`) ++ self.error("bad end tag: %s" % repr(rawdata[i:j])) + tag = match.group(1).lower() + if ( self.cdata_endtag is not None + and tag != self.cdata_endtag): +@@ -388,7 +388,7 @@ class HTMLParser(markupbase.ParserBase): + pass + + def unknown_decl(self, data): +- self.error("unknown declaration: " + `data`) ++ self.error("unknown declaration: " + repr(data)) + + # Internal -- helper to remove special character quoting + def unescape(self, s): +--- TAL/HTMLTALParser.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/HTMLTALParser.py +@@ -17,9 +17,9 @@ Parse HTML and compile to TALInterpreter intermediate + + import sys + +-from TALGenerator import TALGenerator +-from HTMLParser import HTMLParser, HTMLParseError +-from TALDefs import \ ++from .TALGenerator import TALGenerator ++from .HTMLParser import HTMLParser, HTMLParseError ++from .TALDefs import \ + ZOPE_METAL_NS, ZOPE_TAL_NS, ZOPE_I18N_NS, METALError, TALError, I18NError + + BOOLEAN_HTML_ATTRS = [ +@@ -61,7 +61,7 @@ BLOCK_LEVEL_HTML_TAGS = [ + ] + + TIGHTEN_IMPLICIT_CLOSE_TAGS = (PARA_LEVEL_HTML_TAGS +- + BLOCK_CLOSING_TAG_MAP.keys()) ++ + list(BLOCK_CLOSING_TAG_MAP.keys())) + + + class NestingError(HTMLParseError): +@@ -118,7 +118,7 @@ class HTMLTALParser(HTMLParser): + f.close() + try: + self.parseString(data) +- except TALError, e: ++ except TALError as e: + e.setFile(file) + raise + +@@ -175,7 +175,7 @@ class HTMLTALParser(HTMLParser): + if tag in 
EMPTY_HTML_TAGS: + return + close_to = -1 +- if BLOCK_CLOSING_TAG_MAP.has_key(tag): ++ if tag in BLOCK_CLOSING_TAG_MAP: + blocks_to_close = BLOCK_CLOSING_TAG_MAP[tag] + for i in range(len(self.tagstack)): + t = self.tagstack[i] +@@ -287,19 +287,19 @@ class HTMLTALParser(HTMLParser): + if ns and ns != 'unknown': + item = (key, value, ns) + if ns == 'tal': +- if taldict.has_key(keybase): ++ if keybase in taldict: + raise TALError("duplicate TAL attribute " + +- `keybase`, self.getpos()) ++ repr(keybase), self.getpos()) + taldict[keybase] = value + elif ns == 'metal': +- if metaldict.has_key(keybase): ++ if keybase in metaldict: + raise METALError("duplicate METAL attribute " + +- `keybase`, self.getpos()) ++ repr(keybase), self.getpos()) + metaldict[keybase] = value + elif ns == 'i18n': +- if i18ndict.has_key(keybase): ++ if keybase in i18ndict: + raise I18NError("duplicate i18n attribute " + +- `keybase`, self.getpos()) ++ repr(keybase), self.getpos()) + i18ndict[keybase] = value + attrlist.append(item) + if namens in ('metal', 'tal'): +--- TAL/ITALES.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/ITALES.py +@@ -51,13 +51,14 @@ class ITALESEngine(Interface): + using the 'is' operator in Python. + """ + +- def setPosition((lineno, offset)): ++ def setPosition(xxx_todo_changeme): + """Inform the engine of the current position in the source file. + + This is used to allow the evaluation engine to report + execution errors so that site developers can more easily + locate the offending expression. + """ ++ (lineno, offset) = xxx_todo_changeme + + def setSourceFile(filename): + """Inform the engine of the name of the current source file. +@@ -112,12 +113,13 @@ class ITALESEngine(Interface): + No constraints are imposed on the return value. + """ + +- def createErrorInfo(exception, (lineno, offset)): ++ def createErrorInfo(exception, xxx_todo_changeme1): + """Returns an ITALESErrorInfo object. 
+ + The returned object is used to provide information about the + error condition for the on-error handler. + """ ++ (lineno, offset) = xxx_todo_changeme1 + + def setGlobal(name, value): + """Set a global variable. +--- TAL/TALDefs.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/TALDefs.py +@@ -17,7 +17,7 @@ Common definitions used by TAL and METAL compilation a + + from types import ListType, TupleType + +-from ITALES import ITALESErrorInfo ++from .ITALES import ITALESErrorInfo + + TAL_VERSION = "1.4" + +@@ -118,17 +118,17 @@ def parseAttributeReplacements(arg): + for part in splitParts(arg): + m = _attr_re.match(part) + if not m: +- raise TALError("Bad syntax in attributes:" + `part`) ++ raise TALError("Bad syntax in attributes:" + repr(part)) + name, expr = m.group(1, 2) +- if dict.has_key(name): +- raise TALError("Duplicate attribute name in attributes:" + `part`) ++ if name in dict: ++ raise TALError("Duplicate attribute name in attributes:" + repr(part)) + dict[name] = expr + return dict + + def parseSubstitution(arg, position=(None, None)): + m = _subst_re.match(arg) + if not m: +- raise TALError("Bad syntax in substitution text: " + `arg`, position) ++ raise TALError("Bad syntax in substitution text: " + repr(arg), position) + key, expr = m.group(1, 2) + if not key: + key = "text" +--- TAL/TALGenerator.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/TALGenerator.py +@@ -18,12 +18,12 @@ Code generator for TALInterpreter intermediate code. + import re + import cgi + +-import TALDefs ++from . 
import TALDefs + +-from TALDefs import NAME_RE, TAL_VERSION +-from TALDefs import I18NError, METALError, TALError +-from TALDefs import parseSubstitution +-from TranslationContext import TranslationContext, DEFAULT_DOMAIN ++from .TALDefs import NAME_RE, TAL_VERSION ++from .TALDefs import I18NError, METALError, TALError ++from .TALDefs import parseSubstitution ++from .TranslationContext import TranslationContext, DEFAULT_DOMAIN + + I18N_REPLACE = 1 + I18N_CONTENT = 2 +@@ -75,7 +75,7 @@ class TALGenerator: + endsep = "/>" + else: + endsep = " />" +- for cursor in xrange(len(program)+1): ++ for cursor in range(len(program)+1): + try: + item = program[cursor] + except IndexError: +@@ -203,8 +203,8 @@ class TALGenerator: + def compileExpression(self, expr): + try: + return self.expressionCompiler.compile(expr) +- except self.CompilerError, err: +- raise TALError('%s in expression %s' % (err.args[0], `expr`), ++ except self.CompilerError as err: ++ raise TALError('%s in expression %s' % (err.args[0], repr(expr)), + self.position) + + def pushProgram(self): +@@ -265,7 +265,7 @@ class TALGenerator: + m = re.match( + r"(?s)\s*(?:(global|local)\s+)?(%s)\s+(.*)\Z" % NAME_RE, part) + if not m: +- raise TALError("invalid define syntax: " + `part`, ++ raise TALError("invalid define syntax: " + repr(part), + self.position) + scope, name, expr = m.group(1, 2, 3) + scope = scope or "local" +@@ -299,7 +299,7 @@ class TALGenerator: + def emitRepeat(self, arg): + m = re.match("(?s)\s*(%s)\s+(.*)\Z" % NAME_RE, arg) + if not m: +- raise TALError("invalid repeat syntax: " + `arg`, ++ raise TALError("invalid repeat syntax: " + repr(arg), + self.position) + name, expr = m.group(1, 2) + cexpr = self.compileExpression(expr) +@@ -362,11 +362,11 @@ class TALGenerator: + def emitDefineMacro(self, macroName): + program = self.popProgram() + macroName = macroName.strip() +- if self.macros.has_key(macroName): +- raise METALError("duplicate macro definition: %s" % `macroName`, ++ if macroName in 
self.macros: ++ raise METALError("duplicate macro definition: %s" % repr(macroName), + self.position) + if not re.match('%s$' % NAME_RE, macroName): +- raise METALError("invalid macro name: %s" % `macroName`, ++ raise METALError("invalid macro name: %s" % repr(macroName), + self.position) + self.macros[macroName] = program + self.inMacroDef = self.inMacroDef - 1 +@@ -382,18 +382,18 @@ class TALGenerator: + program = self.popProgram() + slotName = slotName.strip() + if not re.match('%s$' % NAME_RE, slotName): +- raise METALError("invalid slot name: %s" % `slotName`, ++ raise METALError("invalid slot name: %s" % repr(slotName), + self.position) + self.emit("defineSlot", slotName, program) + + def emitFillSlot(self, slotName): + program = self.popProgram() + slotName = slotName.strip() +- if self.slots.has_key(slotName): +- raise METALError("duplicate fill-slot name: %s" % `slotName`, ++ if slotName in self.slots: ++ raise METALError("duplicate fill-slot name: %s" % repr(slotName), + self.position) + if not re.match('%s$' % NAME_RE, slotName): +- raise METALError("invalid slot name: %s" % `slotName`, ++ raise METALError("invalid slot name: %s" % repr(slotName), + self.position) + self.slots[slotName] = program + self.inMacroUse = 1 +@@ -457,13 +457,13 @@ class TALGenerator: + newlist = [] + for item in attrlist: + key = item[0] +- if repldict.has_key(key): ++ if key in repldict: + expr, xlat = repldict[key] + item = item[:2] + ("replace", expr, xlat) + del repldict[key] + newlist.append(item) + # Add dynamic-only attributes +- for key, (expr, xlat) in repldict.items(): ++ for key, (expr, xlat) in list(repldict.items()): + newlist.append((key, None, "insert", expr, xlat)) + return newlist + +@@ -478,25 +478,25 @@ class TALGenerator: + return + + self.position = position +- for key, value in taldict.items(): ++ for key, value in list(taldict.items()): + if key not in TALDefs.KNOWN_TAL_ATTRIBUTES: +- raise TALError("bad TAL attribute: " + `key`, position) ++ raise 
TALError("bad TAL attribute: " + repr(key), position) + if not (value or key == 'omit-tag'): + raise TALError("missing value for TAL attribute: " + +- `key`, position) +- for key, value in metaldict.items(): ++ repr(key), position) ++ for key, value in list(metaldict.items()): + if key not in TALDefs.KNOWN_METAL_ATTRIBUTES: +- raise METALError("bad METAL attribute: " + `key`, ++ raise METALError("bad METAL attribute: " + repr(key), + position) + if not value: + raise TALError("missing value for METAL attribute: " + +- `key`, position) +- for key, value in i18ndict.items(): ++ repr(key), position) ++ for key, value in list(i18ndict.items()): + if key not in TALDefs.KNOWN_I18N_ATTRIBUTES: +- raise I18NError("bad i18n attribute: " + `key`, position) ++ raise I18NError("bad i18n attribute: " + repr(key), position) + if not value and key in ("attributes", "data", "id"): + raise I18NError("missing value for i18n attribute: " + +- `key`, position) ++ repr(key), position) + todo = {} + defineMacro = metaldict.get("define-macro") + useMacro = metaldict.get("use-macro") +@@ -655,10 +655,10 @@ class TALGenerator: + i18nattrs = () + # Convert repldict's name-->expr mapping to a + # name-->(compiled_expr, translate) mapping +- for key, value in repldict.items(): ++ for key, value in list(repldict.items()): + repldict[key] = self.compileExpression(value), key in i18nattrs + for key in i18nattrs: +- if not repldict.has_key(key): ++ if key not in repldict: + repldict[key] = None, 1 + else: + repldict = {} +--- TAL/TALInterpreter.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/TALInterpreter.py +@@ -21,13 +21,13 @@ import re + from types import ListType + from cgi import escape + # Do not use cStringIO here! It's not unicode aware. 
:( +-from StringIO import StringIO +-from ustr import ustr ++from io import StringIO ++from .ustr import ustr + +-from TALDefs import TAL_VERSION, TALError, METALError +-from TALDefs import isCurrentVersion, getProgramVersion, getProgramMode +-from TALGenerator import TALGenerator +-from TranslationContext import TranslationContext ++from .TALDefs import TAL_VERSION, TALError, METALError ++from .TALDefs import isCurrentVersion, getProgramVersion, getProgramMode ++from .TALGenerator import TALGenerator ++from .TranslationContext import TranslationContext + + BOOLEAN_HTML_ATTRS = [ + # List of Boolean attributes in HTML that should be rendered in +@@ -64,7 +64,7 @@ def interpolate(text, mapping): + # Now substitute with the variables in mapping. + for string in to_replace: + var = _get_var_regex.findall(string)[0] +- if mapping.has_key(var): ++ if var in mapping: + # Call ustr because we may have an integer for instance. + subst = ustr(mapping[var]) + try: +@@ -73,7 +73,7 @@ def interpolate(text, mapping): + # subst contains high-bit chars... + # As we have no way of knowing the correct encoding, + # substitue something instead of raising an exception. 
+- subst = `subst`[1:-1] ++ subst = repr(subst)[1:-1] + text = text.replace(string, subst) + return text + +@@ -90,7 +90,7 @@ class AltTALGenerator(TALGenerator): + + def emit(self, *args): + if self.enabled: +- apply(TALGenerator.emit, (self,) + args) ++ TALGenerator.emit(*(self,) + args) + + def emitStartElement(self, name, attrlist, taldict, metaldict, i18ndict, + position=(None, None), isend=0): +@@ -176,7 +176,7 @@ class TALInterpreter: + def pushMacro(self, macroName, slots, entering=1): + if len(self.macroStack) >= self.stackLimit: + raise METALError("macro nesting limit (%d) exceeded " +- "by %s" % (self.stackLimit, `macroName`)) ++ "by %s" % (self.stackLimit, repr(macroName))) + self.macroStack.append([macroName, slots, entering, self.i18nContext]) + + def popMacro(self): +@@ -273,12 +273,13 @@ class TALInterpreter: + self.do_startTag(stuff, self.endsep, self.endlen) + bytecode_handlers["startEndTag"] = do_startEndTag + +- def do_startTag(self, (name, attrList), ++ def do_startTag(self, xxx_todo_changeme, + end=">", endlen=1, _len=len): + # The bytecode generator does not cause calls to this method + # for start tags with no attributes; those are optimized down + # to rawtext events. Hence, there is no special "fast path" + # for that case. ++ (name, attrList) = xxx_todo_changeme + L = ["<", name] + append = L.append + col = self.col + _len(name) + 1 +@@ -393,8 +394,9 @@ class TALInterpreter: + self.restoreOutputState(state) + self.interpret(program) + +- def do_optTag(self, (name, cexpr, tag_ns, isend, start, program), ++ def do_optTag(self, xxx_todo_changeme1, + omit=0): ++ (name, cexpr, tag_ns, isend, start, program) = xxx_todo_changeme1 + if tag_ns and not self.showtal: + return self.no_tag(start, program) + +@@ -419,10 +421,11 @@ class TALInterpreter: + for i in range(len(self.macroStack)): + what, macroName, slots = self.macroStack[i][:3] + sys.stderr.write("| %2d. 
%-12s %-12s %s\n" % +- (i, what, macroName, slots and slots.keys())) ++ (i, what, macroName, slots and list(slots.keys()))) + sys.stderr.write("+--------------------------------------\n") + +- def do_rawtextBeginScope(self, (s, col, position, closeprev, dict)): ++ def do_rawtextBeginScope(self, xxx_todo_changeme2): ++ (s, col, position, closeprev, dict) = xxx_todo_changeme2 + self._stream_write(s) + self.col = col + self.position = position +@@ -435,7 +438,8 @@ class TALInterpreter: + self.engine.beginScope() + self.scopeLevel = self.scopeLevel + 1 + +- def do_rawtextBeginScope_tal(self, (s, col, position, closeprev, dict)): ++ def do_rawtextBeginScope_tal(self, xxx_todo_changeme3): ++ (s, col, position, closeprev, dict) = xxx_todo_changeme3 + self._stream_write(s) + self.col = col + self.position = position +@@ -469,11 +473,13 @@ class TALInterpreter: + def do_setLocal(self, notused): + pass + +- def do_setLocal_tal(self, (name, expr)): ++ def do_setLocal_tal(self, xxx_todo_changeme4): ++ (name, expr) = xxx_todo_changeme4 + self.engine.setLocal(name, self.engine.evaluateValue(expr)) + bytecode_handlers["setLocal"] = do_setLocal + +- def do_setGlobal_tal(self, (name, expr)): ++ def do_setGlobal_tal(self, xxx_todo_changeme5): ++ (name, expr) = xxx_todo_changeme5 + self.engine.setGlobal(name, self.engine.evaluateValue(expr)) + bytecode_handlers["setGlobal"] = do_setLocal + +@@ -588,7 +594,8 @@ class TALInterpreter: + def do_insertStructure(self, stuff): + self.interpret(stuff[2]) + +- def do_insertStructure_tal(self, (expr, repldict, block)): ++ def do_insertStructure_tal(self, xxx_todo_changeme6): ++ (expr, repldict, block) = xxx_todo_changeme6 + structure = self.engine.evaluateStructure(expr) + if structure is None: + return +@@ -607,7 +614,7 @@ class TALInterpreter: + bytecode_handlers["insertStructure"] = do_insertStructure + + def insertHTMLStructure(self, text, repldict): +- from HTMLTALParser import HTMLTALParser ++ from .HTMLTALParser import HTMLTALParser + 
gen = AltTALGenerator(repldict, self.engine.getCompiler(), 0) + p = HTMLTALParser(gen) # Raises an exception if text is invalid + p.parseString(text) +@@ -615,7 +622,7 @@ class TALInterpreter: + self.interpret(program) + + def insertXMLStructure(self, text, repldict): +- from TALParser import TALParser ++ from .TALParser import TALParser + gen = AltTALGenerator(repldict, self.engine.getCompiler(), 0) + p = TALParser(gen) + gen.enable(0) +@@ -627,12 +634,14 @@ class TALInterpreter: + program, macros = gen.getCode() + self.interpret(program) + +- def do_loop(self, (name, expr, block)): ++ def do_loop(self, xxx_todo_changeme7): ++ (name, expr, block) = xxx_todo_changeme7 + self.interpret(block) + +- def do_loop_tal(self, (name, expr, block)): ++ def do_loop_tal(self, xxx_todo_changeme8): ++ (name, expr, block) = xxx_todo_changeme8 + iterator = self.engine.setRepeat(name, expr) +- while iterator.next(): ++ while next(iterator): + self.interpret(block) + bytecode_handlers["loop"] = do_loop + +@@ -654,22 +663,26 @@ class TALInterpreter: + # XXX We need to pass in one of context or target_language + return self.engine.translate(self.i18nContext.domain, msgid, i18ndict) + +- def do_rawtextColumn(self, (s, col)): ++ def do_rawtextColumn(self, xxx_todo_changeme9): ++ (s, col) = xxx_todo_changeme9 + self._stream_write(s) + self.col = col + bytecode_handlers["rawtextColumn"] = do_rawtextColumn + +- def do_rawtextOffset(self, (s, offset)): ++ def do_rawtextOffset(self, xxx_todo_changeme10): ++ (s, offset) = xxx_todo_changeme10 + self._stream_write(s) + self.col = self.col + offset + bytecode_handlers["rawtextOffset"] = do_rawtextOffset + +- def do_condition(self, (condition, block)): ++ def do_condition(self, xxx_todo_changeme11): ++ (condition, block) = xxx_todo_changeme11 + if not self.tal or self.engine.evaluateBoolean(condition): + self.interpret(block) + bytecode_handlers["condition"] = do_condition + +- def do_defineMacro(self, (macroName, macro)): ++ def 
do_defineMacro(self, xxx_todo_changeme12): ++ (macroName, macro) = xxx_todo_changeme12 + macs = self.macroStack + if len(macs) == 1: + entering = macs[-1][2] +@@ -682,7 +695,8 @@ class TALInterpreter: + self.interpret(macro) + bytecode_handlers["defineMacro"] = do_defineMacro + +- def do_useMacro(self, (macroName, macroExpr, compiledSlots, block)): ++ def do_useMacro(self, xxx_todo_changeme13): ++ (macroName, macroExpr, compiledSlots, block) = xxx_todo_changeme13 + if not self.metal: + self.interpret(block) + return +@@ -692,12 +706,12 @@ class TALInterpreter: + else: + if not isCurrentVersion(macro): + raise METALError("macro %s has incompatible version %s" % +- (`macroName`, `getProgramVersion(macro)`), ++ (repr(macroName), repr(getProgramVersion(macro))), + self.position) + mode = getProgramMode(macro) + if mode != (self.html and "html" or "xml"): + raise METALError("macro %s has incompatible mode %s" % +- (`macroName`, `mode`), self.position) ++ (repr(macroName), repr(mode)), self.position) + self.pushMacro(macroName, compiledSlots) + prev_source = self.sourceFile + self.interpret(macro) +@@ -707,13 +721,15 @@ class TALInterpreter: + self.popMacro() + bytecode_handlers["useMacro"] = do_useMacro + +- def do_fillSlot(self, (slotName, block)): ++ def do_fillSlot(self, xxx_todo_changeme14): + # This is only executed if the enclosing 'use-macro' evaluates + # to 'default'. 
++ (slotName, block) = xxx_todo_changeme14 + self.interpret(block) + bytecode_handlers["fillSlot"] = do_fillSlot + +- def do_defineSlot(self, (slotName, block)): ++ def do_defineSlot(self, xxx_todo_changeme15): ++ (slotName, block) = xxx_todo_changeme15 + if not self.metal: + self.interpret(block) + return +@@ -734,10 +750,12 @@ class TALInterpreter: + self.interpret(block) + bytecode_handlers["defineSlot"] = do_defineSlot + +- def do_onError(self, (block, handler)): ++ def do_onError(self, xxx_todo_changeme16): ++ (block, handler) = xxx_todo_changeme16 + self.interpret(block) + +- def do_onError_tal(self, (block, handler)): ++ def do_onError_tal(self, xxx_todo_changeme17): ++ (block, handler) = xxx_todo_changeme17 + state = self.saveState() + self.stream = stream = self.StringIO() + self._stream_write = stream.write +@@ -792,4 +810,4 @@ class FasterStringIO(StringIO): + + + def _write_ValueError(s): +- raise ValueError, "I/O operation on closed file" ++ raise ValueError("I/O operation on closed file") +--- TAL/TALParser.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/TALParser.py +@@ -15,9 +15,9 @@ + Parse XML and compile to TALInterpreter intermediate code. 
+ """ + +-from XMLParser import XMLParser +-from TALDefs import XML_NS, ZOPE_I18N_NS, ZOPE_METAL_NS, ZOPE_TAL_NS +-from TALGenerator import TALGenerator ++from .XMLParser import XMLParser ++from .TALDefs import XML_NS, ZOPE_I18N_NS, ZOPE_METAL_NS, ZOPE_TAL_NS ++from .TALGenerator import TALGenerator + + class TALParser(XMLParser): + +@@ -56,7 +56,7 @@ class TALParser(XMLParser): + attrlist.append((key, value)) + else: + # attrs is a dict of {name: value} +- attrlist = attrs.items() ++ attrlist = list(attrs.items()) + attrlist.sort() # For definiteness + name, attrlist, taldict, metaldict, i18ndict \ + = self.process_ns(name, attrlist) +@@ -80,7 +80,7 @@ class TALParser(XMLParser): + taldict[keybase] = value + item = item + ("tal",) + elif ns == 'i18n': +- assert 0, "dealing with i18n: " + `(keybase, value)` ++ assert 0, "dealing with i18n: " + repr((keybase, value)) + i18ndict[keybase] = value + item = item + ('i18n',) + fixedattrlist.append(item) +@@ -135,7 +135,7 @@ def test(): + file = sys.argv[1] + p.parseFile(file) + program, macros = p.getCode() +- from TALInterpreter import TALInterpreter ++ from .TALInterpreter import TALInterpreter + from DummyEngine import DummyEngine + engine = DummyEngine(macros) + TALInterpreter(program, macros, engine, sys.stdout, wrap=0)() +--- TAL/markupbase.py.orig 2003-03-01 20:25:14 UTC ++++ TAL/markupbase.py +@@ -85,7 +85,7 @@ class ParserBase: + self.error("unexpected '[' char in declaration") + else: + self.error( +- "unexpected %s char in declaration" % `rawdata[j]`) ++ "unexpected %s char in declaration" % repr(rawdata[j])) + if j < 0: + return j + return -1 # incomplete +@@ -106,7 +106,7 @@ class ParserBase: + if s != "= self.length: raise IndexError, index ++ if index >= self.length: raise IndexError(index) + return self._sequence[index+self.first] + + def __len__(self): +--- ZTUtils/Iterator.py.orig 2003-03-01 20:25:14 UTC ++++ ZTUtils/Iterator.py +@@ -34,16 +34,16 @@ class Iterator: + self._inner = inner + 
self._prep_next = inner.prep_next + return +- raise TypeError, "Iterator does not support %s" % `seq` ++ raise TypeError("Iterator does not support %s" % repr(seq)) + + def __getattr__(self, name): + try: + inner = getattr(self._inner, 'it_' + name) + except AttributeError: +- raise AttributeError, name ++ raise AttributeError(name) + return inner(self) + +- def next(self): ++ def __next__(self): + if not (hasattr(self, '_next') or self._prep_next(self)): + return 0 + self.index = i = self.nextIndex +@@ -171,7 +171,7 @@ class IterInner(InnerBase): + + def prep_next(self, it): + try: +- it._next = it.seq.next() ++ it._next = next(it.seq) + except StopIteration: + it._prep_next = self.no_next + it.end = 1 +@@ -183,12 +183,12 @@ class IterIter: + def __init__(self, it): + self.it = it + self.skip = it.nextIndex > 0 and not it.end +- def next(self): ++ def __next__(self): + it = self.it + if self.skip: + self.skip = 0 + return it.item +- if it.next(): ++ if next(it): + return it.item + raise StopIteration + +--- ZTUtils/Tree.py.orig 2003-03-01 20:25:14 UTC ++++ ZTUtils/Tree.py +@@ -51,7 +51,7 @@ class TreeNode: + def __getattr__(self, name): + if name == 'depth': + return self._depth() +- raise AttributeError, name ++ raise AttributeError(name) + + _marker = [] + +@@ -153,7 +153,7 @@ class TreeMaker: + child_exp = expanded + if not simple_type(expanded): + # Assume a mapping +- expanded = expanded.has_key(node.id) ++ expanded = node.id in expanded + child_exp = child_exp.get(node.id) + + expanded = expanded or (not subtree and self._expand_root) +@@ -218,7 +218,7 @@ class TreeMaker: + + def simple_type(ob, + is_simple={type(''):1, type(0):1, type(0.0):1, +- type(0L):1, type(None):1 }.has_key): ++ type(0):1, type(None):1 }.has_key): + return is_simple(type(ob)) + + from binascii import b2a_base64, a2b_base64 +--- __init__.py.orig 2003-03-01 22:29:42 UTC ++++ __init__.py +@@ -19,12 +19,12 @@ __version__='$Revision: 1.3 $'[11:-2] + + import sys + +-from TAL.TALParser 
import TALParser +-from TAL.HTMLTALParser import HTMLTALParser +-from TAL.TALGenerator import TALGenerator ++from .TAL.TALParser import TALParser ++from .TAL.HTMLTALParser import HTMLTALParser ++from .TAL.TALGenerator import TALGenerator + # Do not use cStringIO here! It's not unicode aware. :( +-from TAL.TALInterpreter import TALInterpreter, FasterStringIO +-from Expressions import getEngine ++from .TAL.TALInterpreter import TALInterpreter, FasterStringIO ++from .Expressions import getEngine + + + class PageTemplate: +@@ -49,7 +49,7 @@ class PageTemplate: + def __getattr__(self, name): + if name == 'macros': + return self.pt_macros() +- raise AttributeError, name ++ raise AttributeError(name) + + def pt_edit(self, text, content_type): + if content_type: +@@ -83,7 +83,7 @@ class PageTemplate: + __traceback_supplement__ = (PageTemplateTracebackSupplement, self) + + if self._v_errors: +- raise PTRuntimeError, 'Page Template %s has errors: (%s)' % (self.id, ', '.join(self._v_errors)) ++ raise PTRuntimeError('Page Template %s has errors: (%s)' % (self.id, ', '.join(self._v_errors))) + output = self.StringIO() + c = self.pt_getContext() + c.update(extra_context) +@@ -95,7 +95,7 @@ class PageTemplate: + return output.getvalue() + + def __call__(self, *args, **kwargs): +- if not kwargs.has_key('args'): ++ if 'args' not in kwargs: + kwargs['args'] = args + return self.pt_render(extra_context={'options': kwargs}) + +@@ -121,7 +121,7 @@ class PageTemplate: + self._cook() + if self._v_errors: + __traceback_supplement__ = (PageTemplateTracebackSupplement, self) +- raise PTRuntimeError, 'Page Template %s has errors.' % self.id ++ raise PTRuntimeError('Page Template %s has errors.' 
% self.id) + return self._v_macros + + def pt_source_file(self): diff --git a/textproc/raptor/Makefile b/textproc/raptor/Makefile index 62c559f3368..14ef13eaa6f 100644 --- a/textproc/raptor/Makefile +++ b/textproc/raptor/Makefile @@ -2,7 +2,7 @@ PORTNAME= raptor PORTVERSION= 1.4.21 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= textproc MASTER_SITES= SF/librdf/${PORTNAME}/${PORTVERSION} \ http://download.librdf.org/source/ diff --git a/textproc/raptor2/Makefile b/textproc/raptor2/Makefile index 823f4b851c9..dd1882bbcf8 100644 --- a/textproc/raptor2/Makefile +++ b/textproc/raptor2/Makefile @@ -2,7 +2,7 @@ PORTNAME= raptor2 PORTVERSION= 2.0.15 -PORTREVISION= 19 +PORTREVISION= 20 CATEGORIES= textproc MASTER_SITES= http://download.librdf.org/source/ \ SF/librdf/${PORTNAME}/${PORTVERSION} diff --git a/textproc/rubygem-actiontext60/Makefile b/textproc/rubygem-actiontext60/Makefile index eddb1ad715a..fd5ab383fdc 100644 --- a/textproc/rubygem-actiontext60/Makefile +++ b/textproc/rubygem-actiontext60/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= actiontext -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= textproc rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/textproc/rubygem-actiontext60/distinfo b/textproc/rubygem-actiontext60/distinfo index 2414e8c5e22..2b0167f0adb 100644 --- a/textproc/rubygem-actiontext60/distinfo +++ b/textproc/rubygem-actiontext60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058918 -SHA256 (rubygem/actiontext-6.0.4.6.gem) = a9b7e4a72be9b707d91de375a576dbe3a901532962d65267d0066a2c080581e2 -SIZE (rubygem/actiontext-6.0.4.6.gem) = 17408 +TIMESTAMP = 1647264924 +SHA256 (rubygem/actiontext-6.0.4.7.gem) = f604a7e8145c4650df40a82d41213c6adf166e74b731ba5b531160c9bb52c486 +SIZE (rubygem/actiontext-6.0.4.7.gem) = 17408 diff --git a/textproc/rubygem-actiontext61/Makefile b/textproc/rubygem-actiontext61/Makefile index 7ba1a99cb62..aed15e68874 100644 --- a/textproc/rubygem-actiontext61/Makefile +++ 
b/textproc/rubygem-actiontext61/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= actiontext -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= textproc rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/textproc/rubygem-actiontext61/distinfo b/textproc/rubygem-actiontext61/distinfo index 396ce5b425a..bb7b33d7293 100644 --- a/textproc/rubygem-actiontext61/distinfo +++ b/textproc/rubygem-actiontext61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058944 -SHA256 (rubygem/actiontext-6.1.4.6.gem) = f760fab407a42b132820e08ad3d166f37ffca6391c05593238031a59aa2856d8 -SIZE (rubygem/actiontext-6.1.4.6.gem) = 19456 +TIMESTAMP = 1647264950 +SHA256 (rubygem/actiontext-6.1.4.7.gem) = 8d87bd15a215fb30659bcb0b57b8327d968c45fa67840967fada33a7e1b79137 +SIZE (rubygem/actiontext-6.1.4.7.gem) = 19456 diff --git a/textproc/rubygem-actiontext70/Makefile b/textproc/rubygem-actiontext70/Makefile index 76988f64a42..4eebed62161 100644 --- a/textproc/rubygem-actiontext70/Makefile +++ b/textproc/rubygem-actiontext70/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= actiontext -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= textproc rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/textproc/rubygem-actiontext70/distinfo b/textproc/rubygem-actiontext70/distinfo index aa576a24a48..a5aab8ac53f 100644 --- a/textproc/rubygem-actiontext70/distinfo +++ b/textproc/rubygem-actiontext70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058970 -SHA256 (rubygem/actiontext-7.0.2.gem) = afec44c03e400848bb2ff4fc63383cd0f111e73b6a59b89a816d0a431ce14557 -SIZE (rubygem/actiontext-7.0.2.gem) = 91648 +TIMESTAMP = 1647264976 +SHA256 (rubygem/actiontext-7.0.2.3.gem) = 78f9f8b3cab1a48bfeb8ca1ba45ebe7daf717463a8720f3f883a2cc27a047e2e +SIZE (rubygem/actiontext-7.0.2.3.gem) = 91648 diff --git a/textproc/rubygem-chewy/Makefile b/textproc/rubygem-chewy/Makefile index c7904036432..3dd638c4100 100644 --- a/textproc/rubygem-chewy/Makefile +++ b/textproc/rubygem-chewy/Makefile @@ -1,7 
+1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= chewy -PORTVERSION= 7.2.4 +PORTVERSION= 7.2.5 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-chewy/distinfo b/textproc/rubygem-chewy/distinfo index baf0e8b029b..faab75d6712 100644 --- a/textproc/rubygem-chewy/distinfo +++ b/textproc/rubygem-chewy/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058862 -SHA256 (rubygem/chewy-7.2.4.gem) = 4e1bcf8028739a4937e9f695064b93e3cac2b35585d79e404d03dd233ef8cdc9 -SIZE (rubygem/chewy-7.2.4.gem) = 155648 +TIMESTAMP = 1647264862 +SHA256 (rubygem/chewy-7.2.5.gem) = b8be693965575ec60092bb0dfaa1f3fd3d67a60c642056831bd91a0f45d7c0c2 +SIZE (rubygem/chewy-7.2.5.gem) = 159744 diff --git a/textproc/rubygem-commonmarker/Makefile b/textproc/rubygem-commonmarker/Makefile index df3b1be941a..6e73b702a3c 100644 --- a/textproc/rubygem-commonmarker/Makefile +++ b/textproc/rubygem-commonmarker/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= commonmarker -PORTVERSION= 0.23.2 +PORTVERSION= 0.23.4 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-commonmarker/distinfo b/textproc/rubygem-commonmarker/distinfo index ce24a15cc81..154bf84e781 100644 --- a/textproc/rubygem-commonmarker/distinfo +++ b/textproc/rubygem-commonmarker/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632227624 -SHA256 (rubygem/commonmarker-0.23.2.gem) = 3adf02bef0de13daa867e1ad648f7433a50bf05529aef19d1e673eb92bd2506b -SIZE (rubygem/commonmarker-0.23.2.gem) = 155648 +TIMESTAMP = 1647264864 +SHA256 (rubygem/commonmarker-0.23.4.gem) = 95d9cb050576376374a66d71a4feab3562e0955aab9d80a3e8606a5cf5e9c7ce +SIZE (rubygem/commonmarker-0.23.4.gem) = 156160 diff --git a/textproc/rubygem-elasticsearch-api/Makefile b/textproc/rubygem-elasticsearch-api/Makefile index b93686572b1..8cd9cea1b00 100644 --- a/textproc/rubygem-elasticsearch-api/Makefile +++ b/textproc/rubygem-elasticsearch-api/Makefile @@ -1,7 +1,7 @@ # Created by: Ryan Steinmetz PORTNAME= elasticsearch-api -PORTVERSION= 7.17.0 
+PORTVERSION= 7.17.1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-elasticsearch-api/distinfo b/textproc/rubygem-elasticsearch-api/distinfo index 43ad395b71a..5d39b20b47f 100644 --- a/textproc/rubygem-elasticsearch-api/distinfo +++ b/textproc/rubygem-elasticsearch-api/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058866 -SHA256 (rubygem/elasticsearch-api-7.17.0.gem) = 2781e649fbdd4db190e6157baed77e277e18179f318697c44738b1983e81f410 -SIZE (rubygem/elasticsearch-api-7.17.0.gem) = 93696 +TIMESTAMP = 1647264868 +SHA256 (rubygem/elasticsearch-api-7.17.1.gem) = 89eef58aa624e0ec2b17ebe0361f4cccb7cced3de11de34fc981ee741f5ecac9 +SIZE (rubygem/elasticsearch-api-7.17.1.gem) = 93696 diff --git a/textproc/rubygem-elasticsearch-transport/Makefile b/textproc/rubygem-elasticsearch-transport/Makefile index 69fc8e53eba..584cbf9897d 100644 --- a/textproc/rubygem-elasticsearch-transport/Makefile +++ b/textproc/rubygem-elasticsearch-transport/Makefile @@ -1,7 +1,7 @@ # Created by: Ryan Steinmetz PORTNAME= elasticsearch-transport -PORTVERSION= 7.17.0 +PORTVERSION= 7.17.1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-elasticsearch-transport/distinfo b/textproc/rubygem-elasticsearch-transport/distinfo index bb69033a3f4..096ec990bf4 100644 --- a/textproc/rubygem-elasticsearch-transport/distinfo +++ b/textproc/rubygem-elasticsearch-transport/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058874 -SHA256 (rubygem/elasticsearch-transport-7.17.0.gem) = 1b608d96a3fda3cc594c7cecee084365cd3cdeeffba827e98ec811728bdddb22 -SIZE (rubygem/elasticsearch-transport-7.17.0.gem) = 58880 +TIMESTAMP = 1647264870 +SHA256 (rubygem/elasticsearch-transport-7.17.1.gem) = 8ab3595e8f42976ff5493c8ef594abbc5b417c9059fcc59281b50768f2ebabc6 +SIZE (rubygem/elasticsearch-transport-7.17.1.gem) = 59392 diff --git a/textproc/rubygem-elasticsearch-xpack/Makefile b/textproc/rubygem-elasticsearch-xpack/Makefile index d3dc59d239a..e157b9477a1 100644 --- 
a/textproc/rubygem-elasticsearch-xpack/Makefile +++ b/textproc/rubygem-elasticsearch-xpack/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= elasticsearch-xpack -PORTVERSION= 7.17.0 +PORTVERSION= 7.17.1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-elasticsearch-xpack/distinfo b/textproc/rubygem-elasticsearch-xpack/distinfo index 22697f29a4b..926dee81d86 100644 --- a/textproc/rubygem-elasticsearch-xpack/distinfo +++ b/textproc/rubygem-elasticsearch-xpack/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058876 -SHA256 (rubygem/elasticsearch-xpack-7.17.0.gem) = 934abf618730525ec810f6763ba0ccd0f6e2224c15b660967f826d325c829683 -SIZE (rubygem/elasticsearch-xpack-7.17.0.gem) = 63488 +TIMESTAMP = 1647264872 +SHA256 (rubygem/elasticsearch-xpack-7.17.1.gem) = c0996419a9f8a40b1a1227602946a8e39879e460ea518a6386395d2526b9aca1 +SIZE (rubygem/elasticsearch-xpack-7.17.1.gem) = 63488 diff --git a/textproc/rubygem-elasticsearch/Makefile b/textproc/rubygem-elasticsearch/Makefile index 0d22994e325..ba3db74244b 100644 --- a/textproc/rubygem-elasticsearch/Makefile +++ b/textproc/rubygem-elasticsearch/Makefile @@ -1,7 +1,7 @@ # Created by: Ryan Steinmetz PORTNAME= elasticsearch -PORTVERSION= 7.17.0 +PORTVERSION= 7.17.1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-elasticsearch/distinfo b/textproc/rubygem-elasticsearch/distinfo index 9fdf85cc3e0..1eb26eeb7b0 100644 --- a/textproc/rubygem-elasticsearch/distinfo +++ b/textproc/rubygem-elasticsearch/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058864 -SHA256 (rubygem/elasticsearch-7.17.0.gem) = 388b5451cc77d7632791ed961e250fbf33ce5cefde3b6855faf6e2d7f8775636 -SIZE (rubygem/elasticsearch-7.17.0.gem) = 15360 +TIMESTAMP = 1647264866 +SHA256 (rubygem/elasticsearch-7.17.1.gem) = 762db6d368bef8268a1f811bb87eb98625d94bfa2bf73b43cc7525af5ea3270c +SIZE (rubygem/elasticsearch-7.17.1.gem) = 15360 diff --git a/textproc/rubygem-jekyll-sass-converter/Makefile 
b/textproc/rubygem-jekyll-sass-converter/Makefile index 3837a396a55..a53b2388f79 100644 --- a/textproc/rubygem-jekyll-sass-converter/Makefile +++ b/textproc/rubygem-jekyll-sass-converter/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= jekyll-sass-converter -PORTVERSION= 2.1.0 +PORTVERSION= 2.2.0 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-jekyll-sass-converter/distinfo b/textproc/rubygem-jekyll-sass-converter/distinfo index 72859640a54..837c17e1933 100644 --- a/textproc/rubygem-jekyll-sass-converter/distinfo +++ b/textproc/rubygem-jekyll-sass-converter/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1581007204 -SHA256 (rubygem/jekyll-sass-converter-2.1.0.gem) = bb25965bfdec2c61220192f45d9358d34a9fce388f72ec95119fc6cc09c9cc12 -SIZE (rubygem/jekyll-sass-converter-2.1.0.gem) = 7680 +TIMESTAMP = 1647264874 +SHA256 (rubygem/jekyll-sass-converter-2.2.0.gem) = 71894f61b1f062f9d56e90b83b29386c85af349749c5d6277a5ba4b2e69cfd1c +SIZE (rubygem/jekyll-sass-converter-2.2.0.gem) = 8192 diff --git a/textproc/rubygem-libxml-ruby/Makefile b/textproc/rubygem-libxml-ruby/Makefile index f0b2888f995..27639dc3211 100644 --- a/textproc/rubygem-libxml-ruby/Makefile +++ b/textproc/rubygem-libxml-ruby/Makefile @@ -2,6 +2,7 @@ PORTNAME= libxml-ruby PORTVERSION= 3.2.1 +PORTREVISION= 1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-liquid/Makefile b/textproc/rubygem-liquid/Makefile index b1d047b14d4..d1c1a041dc0 100644 --- a/textproc/rubygem-liquid/Makefile +++ b/textproc/rubygem-liquid/Makefile @@ -1,7 +1,7 @@ # Created by: peter.schuller@infidyne.com PORTNAME= liquid -PORTVERSION= 5.1.0 +PORTVERSION= 5.2.0 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-liquid/distinfo b/textproc/rubygem-liquid/distinfo index 3924b70ab80..53af99f9735 100644 --- a/textproc/rubygem-liquid/distinfo +++ b/textproc/rubygem-liquid/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632037998 -SHA256 
(rubygem/liquid-5.1.0.gem) = 989868936ea8546c27fb1066c1da2f676f7fdf13db49eab174e79c1bc4fbb434 -SIZE (rubygem/liquid-5.1.0.gem) = 90624 +TIMESTAMP = 1647264876 +SHA256 (rubygem/liquid-5.2.0.gem) = 815a09e27d9f1f96c0ab59b1c92947120d381022941c41172445d40df7fa5899 +SIZE (rubygem/liquid-5.2.0.gem) = 91136 diff --git a/textproc/rubygem-nokogiri/Makefile b/textproc/rubygem-nokogiri/Makefile index 0170b91d773..0455a2bbfa6 100644 --- a/textproc/rubygem-nokogiri/Makefile +++ b/textproc/rubygem-nokogiri/Makefile @@ -1,5 +1,6 @@ PORTNAME= nokogiri PORTVERSION= 1.13.3 +PORTREVISION= 1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-nokogiri111/Makefile b/textproc/rubygem-nokogiri111/Makefile index 8eb75fe7edd..403c24c85e4 100644 --- a/textproc/rubygem-nokogiri111/Makefile +++ b/textproc/rubygem-nokogiri111/Makefile @@ -1,5 +1,6 @@ PORTNAME= nokogiri PORTVERSION= 1.11.7 +PORTREVISION= 1 CATEGORIES= textproc rubygems MASTER_SITES= RG PKGNAMESUFFIX= 111 diff --git a/textproc/rubygem-nokogumbo/Makefile b/textproc/rubygem-nokogumbo/Makefile index 53d4ae56850..5ef8a22ee51 100644 --- a/textproc/rubygem-nokogumbo/Makefile +++ b/textproc/rubygem-nokogumbo/Makefile @@ -2,6 +2,7 @@ PORTNAME= nokogumbo PORTVERSION= 2.0.5 +PORTREVISION= 1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-ruby-augeas/Makefile b/textproc/rubygem-ruby-augeas/Makefile index dc4aa861c70..291f2b15f89 100644 --- a/textproc/rubygem-ruby-augeas/Makefile +++ b/textproc/rubygem-ruby-augeas/Makefile @@ -2,7 +2,7 @@ PORTNAME= ruby-augeas PORTVERSION= 0.5.0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= textproc rubygems MASTER_SITES= RG diff --git a/textproc/rubygem-ruby-xslt/Makefile b/textproc/rubygem-ruby-xslt/Makefile index 21e13a6202e..63f358a02ec 100644 --- a/textproc/rubygem-ruby-xslt/Makefile +++ b/textproc/rubygem-ruby-xslt/Makefile @@ -2,6 +2,7 @@ PORTNAME= ruby-xslt PORTVERSION= 0.9.10 +PORTREVISION= 1 CATEGORIES= textproc rubygems MASTER_SITES= RG diff 
--git a/textproc/ssddiff/Makefile b/textproc/ssddiff/Makefile index b231ee7fba8..5e2557e42e6 100644 --- a/textproc/ssddiff/Makefile +++ b/textproc/ssddiff/Makefile @@ -2,7 +2,7 @@ PORTNAME= ssddiff PORTVERSION= 0.2 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= textproc MASTER_SITES= http://freebsd.nsu.ru/distfiles/ \ http://ftp.is.co.za/FreeBSD/ports/distfiles/ \ diff --git a/textproc/translate-toolkit/Makefile b/textproc/translate-toolkit/Makefile index ade635d9228..ec31e3c6b0b 100644 --- a/textproc/translate-toolkit/Makefile +++ b/textproc/translate-toolkit/Makefile @@ -1,7 +1,7 @@ # Created by: Andrew Pantyukhin PORTNAME= translate-toolkit -PORTVERSION= 3.5.3 +PORTVERSION= 3.6.0 CATEGORIES= textproc MASTER_SITES= CHEESESHOP @@ -43,7 +43,7 @@ TMSERVER_DESC= Tmserver backend support TRADOS_DESC= Trados format support YAML_DESC= YAML format support -ENCODING_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}chardet>=4.0.0,1:textproc/py-chardet@${PY_FLAVOR} +ENCODING_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}charset-normalizer>=2.0.12:textproc/py-charset-normalizer@${PY_FLAVOR} FLUENT_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}fluent.syntax>=0.18.1:textproc/py-fluent.syntax@${PY_FLAVOR} ICAL_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}vobject>=0.9.6.1:deskutils/py-vobject@${PY_FLAVOR} ICAL_VARS= MANPAGES+="ical2po po2ical" @@ -53,7 +53,7 @@ LANGUAGES_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pycountry>=22.1.10:textproc/py-pyc LEVENSHTEIN_RUN_DEPENDS=${PYTHON_PKGNAMEPREFIX}python-Levenshtein>=0.12:devel/py-python-Levenshtein@${PY_FLAVOR} PHP_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}phply>=1.2.5:devel/py-phply@${PY_FLAVOR} PHP_VARS= MANPAGES+="php2po po2php" -RC_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pyparsing>=3.0.6:devel/py-pyparsing@${PY_FLAVOR} +RC_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pyparsing>=3.0.7:devel/py-pyparsing@${PY_FLAVOR} RC_VARS= MANPAGES+="po2rc rc2po" SPELLCHECK_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}enchant>=3.2.2:textproc/py-enchant@${PY_FLAVOR} SUBTITLES_RUN_DEPENDS= 
${PYTHON_PKGNAMEPREFIX}aeidon>=1.10.1:textproc/py-aeidon@${PY_FLAVOR} diff --git a/textproc/translate-toolkit/distinfo b/textproc/translate-toolkit/distinfo index 14f43dd7486..706506cb104 100644 --- a/textproc/translate-toolkit/distinfo +++ b/textproc/translate-toolkit/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133687 -SHA256 (translate-toolkit-3.5.3.tar.gz) = b7ca3e0e8f69c306c372e05a0a814ecafa6176d30ce314e787378dabf3e48dfb -SIZE (translate-toolkit-3.5.3.tar.gz) = 6153779 +TIMESTAMP = 1647264448 +SHA256 (translate-toolkit-3.6.0.tar.gz) = dfdb19383920948e5bc1dafacb994ee07f8d6ecc053cd6e2b4c545ce0430ddff +SIZE (translate-toolkit-3.6.0.tar.gz) = 6184485 diff --git a/textproc/translate-toolkit/files/patch-requirements-optional.txt b/textproc/translate-toolkit/files/patch-requirements-optional.txt index e0b46cc0e64..7d9d3fc1d1d 100644 --- a/textproc/translate-toolkit/files/patch-requirements-optional.txt +++ b/textproc/translate-toolkit/files/patch-requirements-optional.txt @@ -1,4 +1,4 @@ ---- requirements/optional.txt.orig 2021-10-27 09:01:25 UTC +--- requirements/optional.txt.orig 2022-02-25 08:27:33 UTC +++ requirements/optional.txt @@ -1,27 +1,27 @@ -r required.txt @@ -9,8 +9,8 @@ # Format support BeautifulSoup4>=4.3 # Trados # Encoding detection --chardet==4.0.0 # chardet -+chardet>=4.0.0 # chardet +-charset-normalizer==2.0.12 # chardet ++charset-normalizer>=2.0.12 # chardet # Tmserver backend -cheroot==8.6.0 # tmserver +cheroot>=8.6.0 # tmserver @@ -29,12 +29,12 @@ +pycountry>=22.1.10 # Languages +pyenchant>=3.2.2 # spellcheck # Windows Resources (rc2po and po2rc) --pyparsing==3.0.6 # RC -+pyparsing>=3.0.6 # RC +-pyparsing==3.0.7 # RC ++pyparsing>=3.0.7 # RC # Faster matching in e.g. 
pot2po python-Levenshtein>=0.12 # Levenshtein # Format support --ruamel.yaml==0.17.20 # YAML +-ruamel.yaml==0.17.21 # YAML +ruamel.yaml>=0.16.12 # YAML # Format support -vobject==0.9.6.1 # iCal diff --git a/textproc/ucto/Makefile b/textproc/ucto/Makefile index a24e834e865..2265be96231 100644 --- a/textproc/ucto/Makefile +++ b/textproc/ucto/Makefile @@ -1,6 +1,7 @@ PORTNAME= ucto DISTVERSIONPREFIX= v DISTVERSION= 0.24.1 +PORTREVISION= 1 CATEGORIES= textproc MAINTAINER= yuri@FreeBSD.org diff --git a/textproc/wv/Makefile b/textproc/wv/Makefile index 9f5cc3b6aa7..12f7b2d1dc1 100644 --- a/textproc/wv/Makefile +++ b/textproc/wv/Makefile @@ -2,7 +2,7 @@ PORTNAME= wv PORTVERSION= 1.2.9 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= textproc MASTER_SITES= http://www.abisource.com/downloads/wv/${PORTVERSION}/ diff --git a/textproc/wv2/Makefile b/textproc/wv2/Makefile index 46e2faa6935..2a5b87bf143 100644 --- a/textproc/wv2/Makefile +++ b/textproc/wv2/Makefile @@ -2,7 +2,7 @@ PORTNAME= wv2 PORTVERSION= 0.4.2 -PORTREVISION= 8 +PORTREVISION= 9 CATEGORIES= textproc MASTER_SITES= SF/wvware/ diff --git a/textproc/xmlroff/Makefile b/textproc/xmlroff/Makefile index 9172f9b2b37..4945a52a477 100644 --- a/textproc/xmlroff/Makefile +++ b/textproc/xmlroff/Makefile @@ -2,6 +2,7 @@ PORTNAME= xmlroff PORTVERSION= 0.6.3 +PORTREVISION= 1 DISTVERSIONPREFIX= v CATEGORIES= textproc diff --git a/textproc/xmlstarlet/Makefile b/textproc/xmlstarlet/Makefile index 19a47ede1ec..d0cf3effdac 100644 --- a/textproc/xmlstarlet/Makefile +++ b/textproc/xmlstarlet/Makefile @@ -2,6 +2,7 @@ PORTNAME= xmlstarlet PORTVERSION= 1.6.1 +PORTREVISION= 1 CATEGORIES= textproc MASTER_SITES= SF/xmlstar/${PORTNAME}/${PORTVERSION} diff --git a/textproc/xmlwrapp/Makefile b/textproc/xmlwrapp/Makefile index f7f86726ccc..d30accf11e4 100644 --- a/textproc/xmlwrapp/Makefile +++ b/textproc/xmlwrapp/Makefile @@ -2,7 +2,7 @@ PORTNAME= xmlwrapp PORTVERSION= 0.7.0 -PORTREVISION= 19 +PORTREVISION= 20 CATEGORIES= textproc MASTER_SITES= 
SF diff --git a/textproc/zorba/Makefile b/textproc/zorba/Makefile index 3e4fb719740..7fc0efcf6fc 100644 --- a/textproc/zorba/Makefile +++ b/textproc/zorba/Makefile @@ -2,7 +2,7 @@ PORTNAME= zorba PORTVERSION= 2.7.0 -PORTREVISION= 33 +PORTREVISION= 34 CATEGORIES= textproc java MASTER_SITES= https://launchpadlibrarian.net/119058962/ DISTNAME= ${PORTNAME}-src-${PORTVERSION} diff --git a/www/Makefile b/www/Makefile index c21bc44b1b6..989121a56ac 100644 --- a/www/Makefile +++ b/www/Makefile @@ -1607,6 +1607,7 @@ SUBDIR += py-flask-babelex SUBDIR += py-flask-bootstrap SUBDIR += py-flask-cache + SUBDIR += py-flask-caching SUBDIR += py-flask-collect SUBDIR += py-flask-compress SUBDIR += py-flask-cors @@ -2320,6 +2321,7 @@ SUBDIR += varnish-nagios SUBDIR += varnish4 SUBDIR += varnish6 + SUBDIR += varnish7 SUBDIR += varnish_exporter SUBDIR += vdr-plugin-live SUBDIR += vdradmin-am diff --git a/www/aria2/Makefile b/www/aria2/Makefile index 3704649a9e4..e75896bdb99 100644 --- a/www/aria2/Makefile +++ b/www/aria2/Makefile @@ -2,6 +2,7 @@ PORTNAME= aria2 PORTVERSION= 1.36.0 +PORTREVISION= 1 CATEGORIES= www MASTER_SITES= https://github.com/aria2/aria2/releases/download/release-${PORTVERSION}/ \ LOCAL/sunpoet diff --git a/www/bluefish/Makefile b/www/bluefish/Makefile index 4a3b12a12fb..e0688cc8f51 100644 --- a/www/bluefish/Makefile +++ b/www/bluefish/Makefile @@ -2,6 +2,7 @@ PORTNAME= bluefish PORTVERSION= 2.2.12 +PORTREVISION= 1 CATEGORIES= www editors MASTER_SITES= http://www.bennewitz.com/bluefish/stable/source/ \ http://bluefish.mrball.net/stable/source/ diff --git a/www/castget/Makefile b/www/castget/Makefile index eae0ebea187..506bdf6ec36 100644 --- a/www/castget/Makefile +++ b/www/castget/Makefile @@ -2,6 +2,7 @@ PORTNAME= castget DISTVERSION= 2.0.1 +PORTREVISION= 1 CATEGORIES= www MASTER_SITES= SAVANNAH diff --git a/www/chromium/Makefile b/www/chromium/Makefile index a2e9e554b7b..736010d5416 100644 --- a/www/chromium/Makefile +++ b/www/chromium/Makefile @@ -1,7 +1,7 @@ # 
Created by: Florent Thoumie PORTNAME= chromium -PORTVERSION= 99.0.4844.82 +PORTVERSION= 99.0.4844.84 CATEGORIES= www MASTER_SITES= https://commondatastorage.googleapis.com/chromium-browser-official/ \ https://nerd.hu/distfiles/:fonts diff --git a/www/chromium/distinfo b/www/chromium/distinfo index 275f70c8d20..e70f0b8e14f 100644 --- a/www/chromium/distinfo +++ b/www/chromium/distinfo @@ -1,7 +1,7 @@ -TIMESTAMP = 1647811738 -SHA256 (chromium-99.0.4844.82.tar.xz) = b8bb0bc1410de8b9d98889c79410726a4e78801d4e54ea23b23b7b4015c7d25c -SIZE (chromium-99.0.4844.82.tar.xz) = 1316414584 -SHA256 (chromium-99.0.4844.82-testdata.tar.xz) = 2af7befb56c8b904ca575c9830896c969a5f41ab9854593dbd2f4c79d6824003 -SIZE (chromium-99.0.4844.82-testdata.tar.xz) = 258942728 +TIMESTAMP = 1648281454 +SHA256 (chromium-99.0.4844.84.tar.xz) = 20ec184ed34bdc7e660ccf6c007b2db37007de423b3a5a51698a96aa29527515 +SIZE (chromium-99.0.4844.84.tar.xz) = 1326587000 +SHA256 (chromium-99.0.4844.84-testdata.tar.xz) = 682715d5058c6289cdfcc3795ef65b7d757f3f9b4773a8ebcf22a869b6aa3940 +SIZE (chromium-99.0.4844.84-testdata.tar.xz) = 259949040 SHA256 (test_fonts-cd96fc55dc243f6c6f4cb63ad117cad6cd48dceb.tar.gz) = ec973eccd669d417fbf2d0a4dac730744a3174268ff062db5451b55c82bc3492 SIZE (test_fonts-cd96fc55dc243f6c6f4cb63ad117cad6cd48dceb.tar.gz) = 26759650 diff --git a/www/codeigniter/Makefile b/www/codeigniter/Makefile index 576ea977717..dbc938dca24 100644 --- a/www/codeigniter/Makefile +++ b/www/codeigniter/Makefile @@ -1,7 +1,7 @@ # Created by: Greg Larkin PORTNAME= codeigniter -PORTVERSION= 3.1.11 +PORTVERSION= 3.1.13 CATEGORIES= www MAINTAINER= sunpoet@FreeBSD.org @@ -17,12 +17,11 @@ USE_GITHUB= yes GH_ACCOUNT= bcit-ci GH_PROJECT= CodeIgniter -CONFLICTS= codeigniter-1.[0-9]* codeigniter22-2.[0-9]* - NO_ARCH= yes NO_BUILD= yes PLIST_SUB= WWWOWN=${WWWOWN} WWWGRP=${WWWGRP} +SUB_FILES= pkg-message # These are all user-configurable files that we'll install CI_CONF_FILES= application/config/autoload.php \ @@ -41,11 +40,9 
@@ CI_CONF_FILES= application/config/autoload.php \ application/config/user_agents.php \ index.php -OPTIONS_DEFINE= APACHE DOCS MSSQL MYSQLI ODBC PGSQL SQLITE +OPTIONS_DEFINE= APACHE MSSQL MYSQLI ODBC PGSQL SQLITE OPTIONS_SUB= yes -SUB_FILES= pkg-message - APACHE_DESC= Configure for Apache-2.x MSSQL_DESC= Install MSSQL support for PHP MYSQLI_DESC= Install MySQLi support for PHP @@ -55,18 +52,12 @@ SQLITE_DESC= Install SQLite support for PHP APACHE_SUB_FILES= codeigniter.conf APACHE_USES= apache:run -DOCS_VARS= SUB_LIST+=HASHMARK= -DOCS_VARS_OFF= SUB_LIST+=HASHMARK=\# MSSQL_USE= PHP=mssql MYSQLI_USE= PHP=mysqli ODBC_USE= PHP=odbc PGSQL_USE= PHP=pgsql SQLITE_USE= PHP=sqlite3 -post-patch: - @${RM} ${WRKSRC}/user_guide/.buildinfo - @${MV} ${WRKSRC}/readme.rst ${WRKSRC}/user_guide/readme.rst - do-install: cd ${WRKSRC} && ${COPYTREE_SHARE} "application index.php system" ${STAGEDIR}${WWWDIR} .for ci_conf_file in ${CI_CONF_FILES} @@ -77,7 +68,4 @@ do-install-APACHE-on: @${MKDIR} ${STAGEDIR}${PREFIX}/${APACHEETCDIR}/Includes ${INSTALL_DATA} ${WRKDIR}/codeigniter.conf ${STAGEDIR}${PREFIX}/${APACHEETCDIR}/Includes/codeigniter.conf -do-install-DOCS-on: - cd ${WRKSRC}/user_guide && ${COPYTREE_SHARE} . 
${STAGEDIR}${DOCSDIR} - .include diff --git a/www/codeigniter/distinfo b/www/codeigniter/distinfo index 79c6f10fd2b..530f790cff6 100644 --- a/www/codeigniter/distinfo +++ b/www/codeigniter/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1569511355 -SHA256 (bcit-ci-CodeIgniter-3.1.11_GH0.tar.gz) = 4d2df1835f5abc792a9ad0c5532577b3b94d20c358f326191f6b50e0dba4160f -SIZE (bcit-ci-CodeIgniter-3.1.11_GH0.tar.gz) = 2221699 +TIMESTAMP = 1647264450 +SHA256 (bcit-ci-CodeIgniter-3.1.13_GH0.tar.gz) = 8cb7bea9db3aa6aaa028ec9793e65b21540ec21890505d8ef03addcba46f9d23 +SIZE (bcit-ci-CodeIgniter-3.1.13_GH0.tar.gz) = 411079 diff --git a/www/codeigniter/files/codeigniter-development-cgi.conf.in b/www/codeigniter/files/codeigniter-development-cgi.conf.in index ec28ab4ab17..820ef5f2a8a 100644 --- a/www/codeigniter/files/codeigniter-development-cgi.conf.in +++ b/www/codeigniter/files/codeigniter-development-cgi.conf.in @@ -5,14 +5,8 @@ DirectoryIndex index.php index.html Action php-script %%PHPCGI%% AddHandler php-script .php -%%HASHMARK%%Alias /codeigniter/user_guide %%DOCSDIR%% Alias /codeigniter %%WWWDIR%% -%%HASHMARK%% -%%HASHMARK%% Order deny,allow -%%HASHMARK%% Allow from all -%%HASHMARK%% - Order deny,allow Allow from all diff --git a/www/codeigniter/files/codeigniter-development.conf.in b/www/codeigniter/files/codeigniter-development.conf.in index b339faae05b..469aa50c187 100644 --- a/www/codeigniter/files/codeigniter-development.conf.in +++ b/www/codeigniter/files/codeigniter-development.conf.in @@ -2,14 +2,8 @@ AddType application/x-httpd-php .php AddType application/x-httpd-php-source .phps DirectoryIndex index.php index.html -%%HASHMARK%%Alias /codeigniter/user_guide %%DOCSDIR%% Alias /codeigniter %%WWWDIR%% -%%HASHMARK%% -%%HASHMARK%% Order deny,allow -%%HASHMARK%% Allow from all -%%HASHMARK%% - Order deny,allow Allow from all diff --git a/www/codeigniter/files/codeigniter-production-cgi.conf.in b/www/codeigniter/files/codeigniter-production-cgi.conf.in index 7cfee81db01..d89689e06f3 
100644 --- a/www/codeigniter/files/codeigniter-production-cgi.conf.in +++ b/www/codeigniter/files/codeigniter-production-cgi.conf.in @@ -6,12 +6,6 @@ Action php-script %%PHPCGI%% AddHandler php-script .php DocumentRoot %%WWWDIR%% -%%HASHMARK%%Alias /user_guide %%DOCSDIR%% - -%%HASHMARK%% -%%HASHMARK%% Order deny,allow -%%HASHMARK%% Allow from all -%%HASHMARK%% Order deny,allow diff --git a/www/codeigniter/files/codeigniter-production.conf.in b/www/codeigniter/files/codeigniter-production.conf.in index 2b9b97863cc..f52a024eb33 100644 --- a/www/codeigniter/files/codeigniter-production.conf.in +++ b/www/codeigniter/files/codeigniter-production.conf.in @@ -3,12 +3,6 @@ AddType application/x-httpd-php-source .phps DirectoryIndex index.php index.html DocumentRoot %%WWWDIR%% -%%HASHMARK%%Alias /user_guide %%DOCSDIR%% - -%%HASHMARK%% -%%HASHMARK%% Order deny,allow -%%HASHMARK%% Allow from all -%%HASHMARK%% Order deny,allow diff --git a/www/codeigniter/files/codeigniter.conf.in b/www/codeigniter/files/codeigniter.conf.in index b339faae05b..469aa50c187 100644 --- a/www/codeigniter/files/codeigniter.conf.in +++ b/www/codeigniter/files/codeigniter.conf.in @@ -2,14 +2,8 @@ AddType application/x-httpd-php .php AddType application/x-httpd-php-source .phps DirectoryIndex index.php index.html -%%HASHMARK%%Alias /codeigniter/user_guide %%DOCSDIR%% Alias /codeigniter %%WWWDIR%% -%%HASHMARK%% -%%HASHMARK%% Order deny,allow -%%HASHMARK%% Allow from all -%%HASHMARK%% - Order deny,allow Allow from all diff --git a/www/codeigniter/pkg-plist b/www/codeigniter/pkg-plist index 5a73629161c..8b30e730762 100644 --- a/www/codeigniter/pkg-plist +++ b/www/codeigniter/pkg-plist @@ -1,215 +1,4 @@ %%APACHE%%%%APACHEETCDIR%%/Includes/codeigniter.conf -%%PORTDOCS%%%%DOCSDIR%%/DCO.html -%%PORTDOCS%%%%DOCSDIR%%/_downloads/ELDocs.tmbundle.zip -%%PORTDOCS%%%%DOCSDIR%%/_images/appflowchart.gif -%%PORTDOCS%%%%DOCSDIR%%/_images/smile.gif -%%PORTDOCS%%%%DOCSDIR%%/_static/ajax-loader.gif 
-%%PORTDOCS%%%%DOCSDIR%%/_static/basic.css -%%PORTDOCS%%%%DOCSDIR%%/_static/ci-icon.ico -%%PORTDOCS%%%%DOCSDIR%%/_static/comment-bright.png -%%PORTDOCS%%%%DOCSDIR%%/_static/comment-close.png -%%PORTDOCS%%%%DOCSDIR%%/_static/comment.png -%%PORTDOCS%%%%DOCSDIR%%/_static/css/badge_only.css -%%PORTDOCS%%%%DOCSDIR%%/_static/css/citheme.css -%%PORTDOCS%%%%DOCSDIR%%/_static/css/theme.css -%%PORTDOCS%%%%DOCSDIR%%/_static/doctools.js -%%PORTDOCS%%%%DOCSDIR%%/_static/down-pressed.png -%%PORTDOCS%%%%DOCSDIR%%/_static/down.png -%%PORTDOCS%%%%DOCSDIR%%/_static/file.png -%%PORTDOCS%%%%DOCSDIR%%/_static/fonts/FontAwesome.otf -%%PORTDOCS%%%%DOCSDIR%%/_static/fonts/fontawesome-webfont.eot -%%PORTDOCS%%%%DOCSDIR%%/_static/fonts/fontawesome-webfont.svg -%%PORTDOCS%%%%DOCSDIR%%/_static/fonts/fontawesome-webfont.ttf -%%PORTDOCS%%%%DOCSDIR%%/_static/fonts/fontawesome-webfont.woff -%%PORTDOCS%%%%DOCSDIR%%/_static/images/ci-icon.ico -%%PORTDOCS%%%%DOCSDIR%%/_static/jquery-3.1.0.js -%%PORTDOCS%%%%DOCSDIR%%/_static/jquery.js -%%PORTDOCS%%%%DOCSDIR%%/_static/js/oldtheme.js -%%PORTDOCS%%%%DOCSDIR%%/_static/js/theme.js -%%PORTDOCS%%%%DOCSDIR%%/_static/minus.png -%%PORTDOCS%%%%DOCSDIR%%/_static/plus.png -%%PORTDOCS%%%%DOCSDIR%%/_static/pygments.css -%%PORTDOCS%%%%DOCSDIR%%/_static/searchtools.js -%%PORTDOCS%%%%DOCSDIR%%/_static/underscore-1.3.1.js -%%PORTDOCS%%%%DOCSDIR%%/_static/underscore.js -%%PORTDOCS%%%%DOCSDIR%%/_static/up-pressed.png -%%PORTDOCS%%%%DOCSDIR%%/_static/up.png -%%PORTDOCS%%%%DOCSDIR%%/_static/websupport.js -%%PORTDOCS%%%%DOCSDIR%%/changelog.html -%%PORTDOCS%%%%DOCSDIR%%/contributing/index.html -%%PORTDOCS%%%%DOCSDIR%%/database/caching.html -%%PORTDOCS%%%%DOCSDIR%%/database/call_function.html -%%PORTDOCS%%%%DOCSDIR%%/database/configuration.html -%%PORTDOCS%%%%DOCSDIR%%/database/connecting.html -%%PORTDOCS%%%%DOCSDIR%%/database/db_driver_reference.html -%%PORTDOCS%%%%DOCSDIR%%/database/examples.html -%%PORTDOCS%%%%DOCSDIR%%/database/forge.html 
-%%PORTDOCS%%%%DOCSDIR%%/database/helpers.html -%%PORTDOCS%%%%DOCSDIR%%/database/index.html -%%PORTDOCS%%%%DOCSDIR%%/database/metadata.html -%%PORTDOCS%%%%DOCSDIR%%/database/queries.html -%%PORTDOCS%%%%DOCSDIR%%/database/query_builder.html -%%PORTDOCS%%%%DOCSDIR%%/database/results.html -%%PORTDOCS%%%%DOCSDIR%%/database/transactions.html -%%PORTDOCS%%%%DOCSDIR%%/database/utilities.html -%%PORTDOCS%%%%DOCSDIR%%/documentation/index.html -%%PORTDOCS%%%%DOCSDIR%%/general/alternative_php.html -%%PORTDOCS%%%%DOCSDIR%%/general/ancillary_classes.html -%%PORTDOCS%%%%DOCSDIR%%/general/autoloader.html -%%PORTDOCS%%%%DOCSDIR%%/general/caching.html -%%PORTDOCS%%%%DOCSDIR%%/general/cli.html -%%PORTDOCS%%%%DOCSDIR%%/general/common_functions.html -%%PORTDOCS%%%%DOCSDIR%%/general/compatibility_functions.html -%%PORTDOCS%%%%DOCSDIR%%/general/controllers.html -%%PORTDOCS%%%%DOCSDIR%%/general/core_classes.html -%%PORTDOCS%%%%DOCSDIR%%/general/creating_drivers.html -%%PORTDOCS%%%%DOCSDIR%%/general/creating_libraries.html -%%PORTDOCS%%%%DOCSDIR%%/general/credits.html -%%PORTDOCS%%%%DOCSDIR%%/general/drivers.html -%%PORTDOCS%%%%DOCSDIR%%/general/environments.html -%%PORTDOCS%%%%DOCSDIR%%/general/errors.html -%%PORTDOCS%%%%DOCSDIR%%/general/helpers.html -%%PORTDOCS%%%%DOCSDIR%%/general/hooks.html -%%PORTDOCS%%%%DOCSDIR%%/general/index.html -%%PORTDOCS%%%%DOCSDIR%%/general/libraries.html -%%PORTDOCS%%%%DOCSDIR%%/general/managing_apps.html -%%PORTDOCS%%%%DOCSDIR%%/general/models.html -%%PORTDOCS%%%%DOCSDIR%%/general/profiling.html -%%PORTDOCS%%%%DOCSDIR%%/general/requirements.html -%%PORTDOCS%%%%DOCSDIR%%/general/reserved_names.html -%%PORTDOCS%%%%DOCSDIR%%/general/routing.html -%%PORTDOCS%%%%DOCSDIR%%/general/security.html -%%PORTDOCS%%%%DOCSDIR%%/general/styleguide.html -%%PORTDOCS%%%%DOCSDIR%%/general/urls.html -%%PORTDOCS%%%%DOCSDIR%%/general/views.html -%%PORTDOCS%%%%DOCSDIR%%/general/welcome.html -%%PORTDOCS%%%%DOCSDIR%%/genindex.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/array_helper.html 
-%%PORTDOCS%%%%DOCSDIR%%/helpers/captcha_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/cookie_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/date_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/directory_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/download_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/email_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/file_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/form_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/html_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/index.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/inflector_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/language_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/number_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/path_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/security_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/smiley_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/string_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/text_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/typography_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/url_helper.html -%%PORTDOCS%%%%DOCSDIR%%/helpers/xml_helper.html -%%PORTDOCS%%%%DOCSDIR%%/index.html -%%PORTDOCS%%%%DOCSDIR%%/installation/downloads.html -%%PORTDOCS%%%%DOCSDIR%%/installation/index.html -%%PORTDOCS%%%%DOCSDIR%%/installation/troubleshooting.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_120.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_130.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_131.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_132.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_133.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_140.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_141.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_150.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_152.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_153.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_154.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_160.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_161.html 
-%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_162.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_163.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_170.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_171.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_172.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_200.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_201.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_202.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_203.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_210.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_211.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_212.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_213.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_214.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_220.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_221.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_222.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_223.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_300.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_301.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_302.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_303.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_304.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_305.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_306.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_310.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_311.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_3110.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_3111.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_312.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_313.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_314.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_315.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_316.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_317.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_318.html 
-%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_319.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrade_b11.html -%%PORTDOCS%%%%DOCSDIR%%/installation/upgrading.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/benchmark.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/caching.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/calendar.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/cart.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/config.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/email.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/encrypt.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/encryption.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/file_uploading.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/form_validation.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/ftp.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/image_lib.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/index.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/input.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/javascript.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/language.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/loader.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/migration.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/output.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/pagination.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/parser.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/security.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/sessions.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/table.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/trackback.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/typography.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/unit_testing.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/uri.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/user_agent.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/xmlrpc.html -%%PORTDOCS%%%%DOCSDIR%%/libraries/zip.html -%%PORTDOCS%%%%DOCSDIR%%/license.html -%%PORTDOCS%%%%DOCSDIR%%/objects.inv -%%PORTDOCS%%%%DOCSDIR%%/overview/appflow.html -%%PORTDOCS%%%%DOCSDIR%%/overview/at_a_glance.html -%%PORTDOCS%%%%DOCSDIR%%/overview/features.html -%%PORTDOCS%%%%DOCSDIR%%/overview/getting_started.html -%%PORTDOCS%%%%DOCSDIR%%/overview/goals.html 
-%%PORTDOCS%%%%DOCSDIR%%/overview/index.html -%%PORTDOCS%%%%DOCSDIR%%/overview/mvc.html -%%PORTDOCS%%%%DOCSDIR%%/readme.rst -%%PORTDOCS%%%%DOCSDIR%%/search.html -%%PORTDOCS%%%%DOCSDIR%%/searchindex.js -%%PORTDOCS%%%%DOCSDIR%%/tutorial/conclusion.html -%%PORTDOCS%%%%DOCSDIR%%/tutorial/create_news_items.html -%%PORTDOCS%%%%DOCSDIR%%/tutorial/index.html -%%PORTDOCS%%%%DOCSDIR%%/tutorial/news_section.html -%%PORTDOCS%%%%DOCSDIR%%/tutorial/static_pages.html @dir(%%WWWOWN%%,%%WWWGRP%%,) %%WWWDIR%%/application/cache %%WWWDIR%%/application/.htaccess %%WWWDIR%%/application/cache/index.html @@ -432,15 +221,19 @@ %%WWWDIR%%/system/libraries/Form_validation.php %%WWWDIR%%/system/libraries/Ftp.php %%WWWDIR%%/system/libraries/Image_lib.php +%%WWWDIR%%/system/libraries/Javascript.php %%WWWDIR%%/system/libraries/Javascript/Jquery.php %%WWWDIR%%/system/libraries/Javascript/index.html -%%WWWDIR%%/system/libraries/Javascript.php %%WWWDIR%%/system/libraries/Migration.php %%WWWDIR%%/system/libraries/Pagination.php %%WWWDIR%%/system/libraries/Parser.php %%WWWDIR%%/system/libraries/Profiler.php +%%WWWDIR%%/system/libraries/Session/CI_Session_driver_interface.php +%%WWWDIR%%/system/libraries/Session/OldSessionWrapper.php +%%WWWDIR%%/system/libraries/Session/PHP8SessionWrapper.php %%WWWDIR%%/system/libraries/Session/Session.php %%WWWDIR%%/system/libraries/Session/SessionHandlerInterface.php +%%WWWDIR%%/system/libraries/Session/SessionUpdateTimestampHandlerInterface.php %%WWWDIR%%/system/libraries/Session/Session_driver.php %%WWWDIR%%/system/libraries/Session/drivers/Session_database_driver.php %%WWWDIR%%/system/libraries/Session/drivers/Session_files_driver.php diff --git a/www/cssed/Makefile b/www/cssed/Makefile index bcaa9b62157..92869201f93 100644 --- a/www/cssed/Makefile +++ b/www/cssed/Makefile @@ -2,7 +2,7 @@ PORTNAME= cssed PORTVERSION= 0.4.0 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= www MASTER_SITES= SF diff --git a/www/davix/Makefile b/www/davix/Makefile index 
2a3554f295a..0c86f690031 100644 --- a/www/davix/Makefile +++ b/www/davix/Makefile @@ -1,5 +1,6 @@ PORTNAME= davix DISTVERSION= 0.7.6 +PORTREVISION= 1 CATEGORIES= www MASTER_SITES= http://grid-deployment.web.cern.ch/grid-deployment/dms/lcgutil/tar/${PORTNAME}/${DISTVERSION}/ diff --git a/www/deno/Makefile b/www/deno/Makefile index d9fb9ddb2ad..f44870761e2 100644 --- a/www/deno/Makefile +++ b/www/deno/Makefile @@ -1,6 +1,6 @@ PORTNAME= deno DISTVERSIONPREFIX= v -DISTVERSION= 1.20.1 +DISTVERSION= 1.20.3 CATEGORIES= www MAINTAINER= mikael@FreeBSD.org @@ -46,10 +46,9 @@ CARGO_CRATES= Inflector-0.11.4 \ ansi_term-0.12.1 \ anyhow-1.0.55 \ arrayvec-0.4.12 \ - arrayvec-0.5.2 \ arrayvec-0.7.2 \ ash-0.34.0+1.2.203 \ - ast_node-0.7.6 \ + ast_node-0.7.7 \ async-compression-0.3.8 \ async-stream-0.3.2 \ async-stream-impl-0.3.2 \ @@ -74,13 +73,11 @@ CARGO_CRATES= Inflector-0.11.4 \ block-padding-0.2.1 \ brotli-3.3.3 \ brotli-decompressor-2.3.2 \ - build_const-0.2.2 \ bumpalo-3.9.1 \ byteorder-1.4.3 \ bytes-1.1.0 \ cache_control-0.2.0 \ cc-1.0.72 \ - cfg-if-0.1.10 \ cfg-if-1.0.0 \ cfg_aliases-0.1.1 \ chrono-0.4.19 \ @@ -97,7 +94,8 @@ CARGO_CRATES= Inflector-0.11.4 \ core-foundation-sys-0.8.3 \ core-graphics-types-0.1.1 \ cpufeatures-0.2.1 \ - crc-1.8.1 \ + crc-2.1.0 \ + crc-catalog-1.1.1 \ crc32fast-1.3.0 \ crossbeam-channel-0.5.2 \ crossbeam-utils-0.8.6 \ @@ -112,14 +110,15 @@ CARGO_CRATES= Inflector-0.11.4 \ darling_core-0.10.2 \ darling_macro-0.10.2 \ dashmap-4.0.2 \ + dashmap-5.2.0 \ data-encoding-2.3.2 \ data-url-0.1.1 \ debug_unreachable-0.1.1 \ - deno_ast-0.12.0 \ - deno_doc-0.32.0 \ - deno_graph-0.24.0 \ - deno_lint-0.26.0 \ - deno_task_shell-0.1.9 \ + deno_ast-0.13.0 \ + deno_doc-0.33.0 \ + deno_graph-0.25.0 \ + deno_lint-0.28.0 \ + deno_task_shell-0.2.0 \ der-0.4.5 \ derive_more-0.99.17 \ diff-0.1.12 \ @@ -131,8 +130,8 @@ CARGO_CRATES= Inflector-0.11.4 \ dprint-core-0.50.0 \ dprint-plugin-json-0.14.1 \ dprint-plugin-markdown-0.12.2 \ - dprint-plugin-typescript-0.64.3 \ - 
dprint-swc-ecma-ast-view-0.50.0 \ + dprint-plugin-typescript-0.65.1 \ + dprint-swc-ecma-ast-view-0.54.0 \ dyn-clone-1.0.4 \ ecdsa-0.12.4 \ either-1.6.1 \ @@ -144,14 +143,14 @@ CARGO_CRATES= Inflector-0.11.4 \ env_logger-0.8.4 \ errno-0.1.8 \ error-code-2.3.0 \ - eszip-0.17.0 \ + eszip-0.18.0 \ fallible-iterator-0.2.0 \ fallible-streaming-iterator-0.1.9 \ fancy-regex-0.7.1 \ fd-lock-3.0.2 \ ff-0.10.1 \ filetime-0.2.15 \ - fixedbitset-0.2.0 \ + fixedbitset-0.4.1 \ flaky_test-0.1.0 \ flate2-1.0.22 \ fly-accept-encoding-0.2.0-alpha.5 \ @@ -221,14 +220,19 @@ CARGO_CRATES= Inflector-0.11.4 \ kqueue-1.0.4 \ kqueue-sys-1.0.3 \ lazy_static-1.4.0 \ - lexical-5.2.2 \ - lexical-core-0.7.6 \ + lexical-6.1.0 \ + lexical-core-0.8.3 \ + lexical-parse-float-0.8.3 \ + lexical-parse-integer-0.8.3 \ + lexical-util-0.8.3 \ + lexical-write-float-0.8.4 \ + lexical-write-integer-0.8.3 \ libc-0.2.106 \ libffi-2.0.0 \ libffi-sys-1.3.0 \ libloading-0.7.3 \ libm-0.2.1 \ - libsqlite3-sys-0.22.2 \ + libsqlite3-sys-0.24.1 \ linked-hash-map-0.5.4 \ lock_api-0.4.6 \ log-0.4.14 \ @@ -256,35 +260,39 @@ CARGO_CRATES= Inflector-0.11.4 \ nodrop-0.1.14 \ notify-5.0.0-pre.12 \ ntapi-0.3.6 \ - num-bigint-0.2.6 \ + num-bigint-0.4.3 \ num-bigint-dig-0.7.0 \ num-format-0.4.0 \ num-integer-0.1.44 \ num-iter-0.1.42 \ num-traits-0.2.14 \ - num_cpus-1.13.0 \ + num_cpus-1.13.1 \ objc-0.2.7 \ objc_exception-0.1.2 \ - once_cell-1.9.0 \ + once_cell-1.10.0 \ opaque-debug-0.3.0 \ openssl-probe-0.1.5 \ os_pipe-1.0.1 \ os_str_bytes-6.0.0 \ output_vt100-0.1.2 \ - owning_ref-0.4.1 \ p256-0.9.0 \ p384-0.8.0 \ parking_lot-0.11.2 \ + parking_lot-0.12.0 \ parking_lot_core-0.8.5 \ + parking_lot_core-0.9.1 \ path-clean-0.1.0 \ + path-dedot-3.0.16 \ pem-rfc7468-0.2.4 \ percent-encoding-2.1.0 \ pest-2.1.3 \ - petgraph-0.5.1 \ - phf-0.8.0 \ + petgraph-0.6.0 \ + phf-0.10.1 \ phf_generator-0.8.0 \ - phf_macros-0.8.0 \ + phf_generator-0.10.0 \ + phf_macros-0.10.0 \ phf_shared-0.8.0 \ + phf_shared-0.10.0 \ pin-project-1.0.8 \ 
pin-project-internal-1.0.8 \ pin-project-lite-0.2.8 \ @@ -334,7 +342,7 @@ CARGO_CRATES= Inflector-0.11.4 \ ring-0.16.20 \ ron-0.7.0 \ rsa-0.5.0 \ - rusqlite-0.25.4 \ + rusqlite-0.27.0 \ rustc-hash-1.1.0 \ rustc_version-0.2.3 \ rustc_version-0.4.0 \ @@ -363,6 +371,7 @@ CARGO_CRATES= Inflector-0.11.4 \ serde_repr-0.1.7 \ serde_urlencoded-0.7.1 \ sha-1-0.9.8 \ + sha-1-0.10.0 \ sha2-0.9.9 \ sha2-0.10.1 \ shell-escape-0.1.5 \ @@ -378,38 +387,37 @@ CARGO_CRATES= Inflector-0.11.4 \ spin-0.5.2 \ spirv-0.2.0+1.5.4 \ spki-0.4.1 \ - stable_deref_trait-1.2.0 \ static_assertions-1.1.0 \ str-buf-1.0.5 \ - string_cache-0.8.2 \ + string_cache-0.8.3 \ string_cache_codegen-0.5.1 \ string_enum-0.3.1 \ strsim-0.9.3 \ strsim-0.10.0 \ subtle-2.4.1 \ swc_atoms-0.2.9 \ - swc_bundler-0.107.0 \ - swc_common-0.17.5 \ - swc_ecma_ast-0.65.3 \ - swc_ecma_codegen-0.90.0 \ - swc_ecma_codegen_macros-0.6.0 \ - swc_ecma_dep_graph-0.59.0 \ - swc_ecma_loader-0.28.0 \ - swc_ecma_parser-0.88.3 \ - swc_ecma_transforms-0.117.0 \ - swc_ecma_transforms_base-0.58.3 \ - swc_ecma_transforms_classes-0.46.0 \ + swc_bundler-0.127.0 \ + swc_common-0.17.18 \ + swc_ecma_ast-0.71.0 \ + swc_ecma_codegen-0.98.1 \ + swc_ecma_codegen_macros-0.7.0 \ + swc_ecma_dep_graph-0.67.0 \ + swc_ecma_loader-0.29.0 \ + swc_ecma_parser-0.95.1 \ + swc_ecma_transforms-0.136.0 \ + swc_ecma_transforms_base-0.70.1 \ + swc_ecma_transforms_classes-0.58.0 \ swc_ecma_transforms_macros-0.3.0 \ - swc_ecma_transforms_optimization-0.87.0 \ - swc_ecma_transforms_proposal-0.77.0 \ - swc_ecma_transforms_react-0.80.0 \ - swc_ecma_transforms_typescript-0.82.0 \ - swc_ecma_utils-0.65.3 \ - swc_ecma_visit-0.51.1 \ - swc_ecmascript-0.114.3 \ + swc_ecma_transforms_optimization-0.106.0 \ + swc_ecma_transforms_proposal-0.91.0 \ + swc_ecma_transforms_react-0.98.0 \ + swc_ecma_transforms_typescript-0.101.0 \ + swc_ecma_utils-0.74.0 \ + swc_ecma_visit-0.57.0 \ + swc_ecmascript-0.137.0 \ swc_eq_ignore_macros-0.1.0 \ - swc_fast_graph-0.4.0 \ - 
swc_graph_analyzer-0.4.0 \ + swc_fast_graph-0.5.0 \ + swc_graph_analyzer-0.6.0 \ swc_macros_common-0.3.2 \ swc_visit-0.3.0 \ swc_visit_macros-0.3.0 \ @@ -435,9 +443,9 @@ CARGO_CRATES= Inflector-0.11.4 \ tokio-util-0.6.9 \ toml-0.5.8 \ tower-service-0.3.1 \ - tracing-0.1.29 \ - tracing-attributes-0.1.18 \ - tracing-core-0.1.21 \ + tracing-0.1.32 \ + tracing-attributes-0.1.20 \ + tracing-core-0.1.23 \ trust-dns-client-0.20.3 \ trust-dns-proto-0.20.3 \ trust-dns-resolver-0.20.3 \ @@ -456,6 +464,7 @@ CARGO_CRATES= Inflector-0.11.4 \ unic-ucd-version-0.9.0 \ unicase-2.6.0 \ unicode-bidi-0.3.7 \ + unicode-id-0.3.0 \ unicode-normalization-0.1.19 \ unicode-segmentation-1.8.0 \ unicode-width-0.1.9 \ @@ -498,11 +507,17 @@ CARGO_CRATES= Inflector-0.11.4 \ winapi-util-0.1.5 \ winapi-x86_64-pc-windows-gnu-0.4.0 \ windows-sys-0.28.0 \ + windows-sys-0.32.0 \ windows_aarch64_msvc-0.28.0 \ + windows_aarch64_msvc-0.32.0 \ windows_i686_gnu-0.28.0 \ + windows_i686_gnu-0.32.0 \ windows_i686_msvc-0.28.0 \ + windows_i686_msvc-0.32.0 \ windows_x86_64_gnu-0.28.0 \ + windows_x86_64_gnu-0.32.0 \ windows_x86_64_msvc-0.28.0 \ + windows_x86_64_msvc-0.32.0 \ winreg-0.6.2 \ winreg-0.10.1 \ winres-0.1.11 \ diff --git a/www/deno/distinfo b/www/deno/distinfo index fa87879ce23..5f83060d768 100644 --- a/www/deno/distinfo +++ b/www/deno/distinfo @@ -1,4 +1,4 @@ -TIMESTAMP = 1648026344 +TIMESTAMP = 1648453612 SHA256 (rust/crates/Inflector-0.11.4.crate) = fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3 SIZE (rust/crates/Inflector-0.11.4.crate) = 17438 SHA256 (rust/crates/abort_on_panic-2.0.0.crate) = 955f37ac58af2416bac687c8ab66a4ccba282229bd7422a28d2281a5e66a6116 @@ -27,14 +27,12 @@ SHA256 (rust/crates/anyhow-1.0.55.crate) = 159bb86af3a200e19a068f4224eae4c8bb2d0 SIZE (rust/crates/anyhow-1.0.55.crate) = 44429 SHA256 (rust/crates/arrayvec-0.4.12.crate) = cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9 SIZE (rust/crates/arrayvec-0.4.12.crate) = 26551 -SHA256 
(rust/crates/arrayvec-0.5.2.crate) = 23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b -SIZE (rust/crates/arrayvec-0.5.2.crate) = 27838 SHA256 (rust/crates/arrayvec-0.7.2.crate) = 8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6 SIZE (rust/crates/arrayvec-0.7.2.crate) = 29341 SHA256 (rust/crates/ash-0.34.0+1.2.203.crate) = b0f780da53d0063880d45554306489f09dd8d1bda47688b4a57bc579119356df SIZE (rust/crates/ash-0.34.0+1.2.203.crate) = 372762 -SHA256 (rust/crates/ast_node-0.7.6.crate) = 82b2dd56b7c509b3a0bb47a97a066cba459983470d3b8a3c20428737270f70bd -SIZE (rust/crates/ast_node-0.7.6.crate) = 7867 +SHA256 (rust/crates/ast_node-0.7.7.crate) = bc4c00309ed1c8104732df4a5fa9acc3b796b6f8531dfbd5ce0078c86f997244 +SIZE (rust/crates/ast_node-0.7.7.crate) = 8002 SHA256 (rust/crates/async-compression-0.3.8.crate) = 5443ccbb270374a2b1055fc72da40e1f237809cd6bb0e97e66d264cd138473a6 SIZE (rust/crates/async-compression-0.3.8.crate) = 57977 SHA256 (rust/crates/async-stream-0.3.2.crate) = 171374e7e3b2504e0e5236e3b59260560f9fe94bfe9ac39ba5e4e929c5590625 @@ -83,8 +81,6 @@ SHA256 (rust/crates/brotli-3.3.3.crate) = f838e47a451d5a8fa552371f80024dd6ace9b7 SIZE (rust/crates/brotli-3.3.3.crate) = 1369025 SHA256 (rust/crates/brotli-decompressor-2.3.2.crate) = 59ad2d4653bf5ca36ae797b1f4bb4dbddb60ce49ca4aed8a2ce4829f60425b80 SIZE (rust/crates/brotli-decompressor-2.3.2.crate) = 191197 -SHA256 (rust/crates/build_const-0.2.2.crate) = b4ae4235e6dac0694637c763029ecea1a2ec9e4e06ec2729bd21ba4d9c863eb7 -SIZE (rust/crates/build_const-0.2.2.crate) = 4511 SHA256 (rust/crates/bumpalo-3.9.1.crate) = a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899 SIZE (rust/crates/bumpalo-3.9.1.crate) = 77507 SHA256 (rust/crates/byteorder-1.4.3.crate) = 14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610 @@ -95,8 +91,6 @@ SHA256 (rust/crates/cache_control-0.2.0.crate) = 1bf2a5fb3207c12b5d208ebc145f967 SIZE (rust/crates/cache_control-0.2.0.crate) = 2738 SHA256 
(rust/crates/cc-1.0.72.crate) = 22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee SIZE (rust/crates/cc-1.0.72.crate) = 57495 -SHA256 (rust/crates/cfg-if-0.1.10.crate) = 4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822 -SIZE (rust/crates/cfg-if-0.1.10.crate) = 7933 SHA256 (rust/crates/cfg-if-1.0.0.crate) = baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd SIZE (rust/crates/cfg-if-1.0.0.crate) = 7934 SHA256 (rust/crates/cfg_aliases-0.1.1.crate) = fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e @@ -129,8 +123,10 @@ SHA256 (rust/crates/core-graphics-types-0.1.1.crate) = 3a68b68b3446082644c91ac77 SIZE (rust/crates/core-graphics-types-0.1.1.crate) = 2530 SHA256 (rust/crates/cpufeatures-0.2.1.crate) = 95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469 SIZE (rust/crates/cpufeatures-0.2.1.crate) = 10500 -SHA256 (rust/crates/crc-1.8.1.crate) = d663548de7f5cca343f1e0a48d14dcfb0e9eb4e079ec58883b7251539fa10aeb -SIZE (rust/crates/crc-1.8.1.crate) = 9114 +SHA256 (rust/crates/crc-2.1.0.crate) = 49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23 +SIZE (rust/crates/crc-2.1.0.crate) = 8683 +SHA256 (rust/crates/crc-catalog-1.1.1.crate) = ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403 +SIZE (rust/crates/crc-catalog-1.1.1.crate) = 8698 SHA256 (rust/crates/crc32fast-1.3.0.crate) = 738c290dfaea84fc1ca15ad9c168d083b05a714e1efddd8edaab678dc28d2836 SIZE (rust/crates/crc32fast-1.3.0.crate) = 38565 SHA256 (rust/crates/crossbeam-channel-0.5.2.crate) = e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa @@ -159,22 +155,24 @@ SHA256 (rust/crates/darling_macro-0.10.2.crate) = d9b5a2f4ac4969822c62224815d069 SIZE (rust/crates/darling_macro-0.10.2.crate) = 1919 SHA256 (rust/crates/dashmap-4.0.2.crate) = e77a43b28d0668df09411cb0bc9a8c2adc40f9a048afe863e05fd43251e8e39c SIZE (rust/crates/dashmap-4.0.2.crate) = 21066 +SHA256 (rust/crates/dashmap-5.2.0.crate) = 
4c8858831f7781322e539ea39e72449c46b059638250c14344fec8d0aa6e539c +SIZE (rust/crates/dashmap-5.2.0.crate) = 20221 SHA256 (rust/crates/data-encoding-2.3.2.crate) = 3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57 SIZE (rust/crates/data-encoding-2.3.2.crate) = 19159 SHA256 (rust/crates/data-url-0.1.1.crate) = 3a30bfce702bcfa94e906ef82421f2c0e61c076ad76030c16ee5d2e9a32fe193 SIZE (rust/crates/data-url-0.1.1.crate) = 20039 SHA256 (rust/crates/debug_unreachable-0.1.1.crate) = 9a032eac705ca39214d169f83e3d3da290af06d8d1d344d1baad2fd002dca4b3 SIZE (rust/crates/debug_unreachable-0.1.1.crate) = 1184 -SHA256 (rust/crates/deno_ast-0.12.0.crate) = 87226a2fc1270fe8037e1d0c9048d6605c2ed938abceda2570fe047aa70608db -SIZE (rust/crates/deno_ast-0.12.0.crate) = 40822 -SHA256 (rust/crates/deno_doc-0.32.0.crate) = 4df27abd2f29f18578cc22e27ea8d8cd2071e19ecb514cf88afceac889695fb9 -SIZE (rust/crates/deno_doc-0.32.0.crate) = 1202568 -SHA256 (rust/crates/deno_graph-0.24.0.crate) = d5930dfda71f3a9d066520e5b5f3905d6d27b1247fc46a9037dd2ad1c0dbe193 -SIZE (rust/crates/deno_graph-0.24.0.crate) = 1044275 -SHA256 (rust/crates/deno_lint-0.26.0.crate) = 1ab849e6b8c29e30dbdc8e4ac19d48c15ab14ca6b523b1b0d913623fb103eda1 -SIZE (rust/crates/deno_lint-0.26.0.crate) = 257319 -SHA256 (rust/crates/deno_task_shell-0.1.9.crate) = b4b477c481d76502130fc5a212900fbe9da9520ad65c54c7e6a7cb129df082fb -SIZE (rust/crates/deno_task_shell-0.1.9.crate) = 23580 +SHA256 (rust/crates/deno_ast-0.13.0.crate) = ee0863488d08b49241172ad1257c3cac302537c3a337a742debbd6adbccb0607 +SIZE (rust/crates/deno_ast-0.13.0.crate) = 40122 +SHA256 (rust/crates/deno_doc-0.33.0.crate) = bf59895add613edf3e25937de55c6f6cc5ccfc163fe93d57a08687718c629f08 +SIZE (rust/crates/deno_doc-0.33.0.crate) = 1156507 +SHA256 (rust/crates/deno_graph-0.25.0.crate) = 3439f9c0ef8526fa6ee07cd1fe40ce5da94c7efc8c8e130e87e02a4e087ba6d8 +SIZE (rust/crates/deno_graph-0.25.0.crate) = 1005164 +SHA256 (rust/crates/deno_lint-0.28.0.crate) = 
ccd715d5412f54ddd4ac70db8062112c8d975b81a8563a9fa506183a5566c3ec +SIZE (rust/crates/deno_lint-0.28.0.crate) = 259052 +SHA256 (rust/crates/deno_task_shell-0.2.0.crate) = 4f7cfd6605b7291387e54a732e5b5fa8c1fb0d6757dbf4309740ad79c1e1e215 +SIZE (rust/crates/deno_task_shell-0.2.0.crate) = 26325 SHA256 (rust/crates/der-0.4.5.crate) = 79b71cca7d95d7681a4b3b9cdf63c8dbc3730d0584c2c74e31416d64a90493f4 SIZE (rust/crates/der-0.4.5.crate) = 43641 SHA256 (rust/crates/derive_more-0.99.17.crate) = 4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321 @@ -197,10 +195,10 @@ SHA256 (rust/crates/dprint-plugin-json-0.14.1.crate) = 5d7673b479d64372e60e34472 SIZE (rust/crates/dprint-plugin-json-0.14.1.crate) = 18234 SHA256 (rust/crates/dprint-plugin-markdown-0.12.2.crate) = 00a8c4e905bf5c95dbcc6eab7d81a1d884f7fbd0e7f2fd99af0154692fabc8a8 SIZE (rust/crates/dprint-plugin-markdown-0.12.2.crate) = 34135 -SHA256 (rust/crates/dprint-plugin-typescript-0.64.3.crate) = 7ee73a5b621d6d43ae700a62b757464a53477cff956593327cb7f6d07cceff4e -SIZE (rust/crates/dprint-plugin-typescript-0.64.3.crate) = 1004888 -SHA256 (rust/crates/dprint-swc-ecma-ast-view-0.50.0.crate) = 23172ec7d673a2cd0d755619ab7d6103a8f5fc3d0f6d1ddfa2aee8e9d07ad863 -SIZE (rust/crates/dprint-swc-ecma-ast-view-0.50.0.crate) = 65888 +SHA256 (rust/crates/dprint-plugin-typescript-0.65.1.crate) = e482c9fe4929ec672d2bdaf0b6dcfee88684ec23858f0c54f225b565f08882ac +SIZE (rust/crates/dprint-plugin-typescript-0.65.1.crate) = 1005671 +SHA256 (rust/crates/dprint-swc-ecma-ast-view-0.54.0.crate) = a88404348c269ecfe3c753f44caddf954cdb5fba5b55d68d3653202a96b200af +SIZE (rust/crates/dprint-swc-ecma-ast-view-0.54.0.crate) = 67028 SHA256 (rust/crates/dyn-clone-1.0.4.crate) = ee2626afccd7561a06cf1367e2950c4718ea04565e20fb5029b6c7d8ad09abcf SIZE (rust/crates/dyn-clone-1.0.4.crate) = 9654 SHA256 (rust/crates/ecdsa-0.12.4.crate) = 43ee23aa5b4f68c7a092b5c3beb25f50c406adc75e2363634f242f28ab255372 @@ -223,8 +221,8 @@ SHA256 (rust/crates/errno-0.1.8.crate) 
= 1e2b2decb0484e15560df3210cf0d78654bb086 SIZE (rust/crates/errno-0.1.8.crate) = 3199 SHA256 (rust/crates/error-code-2.3.0.crate) = b5115567ac25674e0043e472be13d14e537f37ea8aa4bdc4aef0c89add1db1ff SIZE (rust/crates/error-code-2.3.0.crate) = 5327 -SHA256 (rust/crates/eszip-0.17.0.crate) = 7b6792e543dd8ce5c15434fa1d5b2a10c51d0f28b81b08d420afc17bcd69f55c -SIZE (rust/crates/eszip-0.17.0.crate) = 143836 +SHA256 (rust/crates/eszip-0.18.0.crate) = c9ad96edadcceb5992a4c433528a9100ef6ae03bafd364a70920888812edb334 +SIZE (rust/crates/eszip-0.18.0.crate) = 125669 SHA256 (rust/crates/fallible-iterator-0.2.0.crate) = 4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7 SIZE (rust/crates/fallible-iterator-0.2.0.crate) = 18509 SHA256 (rust/crates/fallible-streaming-iterator-0.1.9.crate) = 7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a @@ -237,8 +235,8 @@ SHA256 (rust/crates/ff-0.10.1.crate) = d0f40b2dcd8bc322217a5f6559ae5f9e9d1de202a SIZE (rust/crates/ff-0.10.1.crate) = 11938 SHA256 (rust/crates/filetime-0.2.15.crate) = 975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98 SIZE (rust/crates/filetime-0.2.15.crate) = 14511 -SHA256 (rust/crates/fixedbitset-0.2.0.crate) = 37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d -SIZE (rust/crates/fixedbitset-0.2.0.crate) = 13597 +SHA256 (rust/crates/fixedbitset-0.4.1.crate) = 279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e +SIZE (rust/crates/fixedbitset-0.4.1.crate) = 15551 SHA256 (rust/crates/flaky_test-0.1.0.crate) = 479cde5eb168cf5a056dd98f311cbfab7494c216394e4fb9eba0336827a8db93 SIZE (rust/crates/flaky_test-0.1.0.crate) = 2401 SHA256 (rust/crates/flate2-1.0.22.crate) = 1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f @@ -377,10 +375,20 @@ SHA256 (rust/crates/kqueue-sys-1.0.3.crate) = 8367585489f01bc55dd27404dcf56b95e6 SIZE (rust/crates/kqueue-sys-1.0.3.crate) = 6673 SHA256 (rust/crates/lazy_static-1.4.0.crate) = 
e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646 SIZE (rust/crates/lazy_static-1.4.0.crate) = 10443 -SHA256 (rust/crates/lexical-5.2.2.crate) = f404a90a744e32e8be729034fc33b90cf2a56418fbf594d69aa3c0214ad414e5 -SIZE (rust/crates/lexical-5.2.2.crate) = 102038 -SHA256 (rust/crates/lexical-core-0.7.6.crate) = 6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe -SIZE (rust/crates/lexical-core-0.7.6.crate) = 494385 +SHA256 (rust/crates/lexical-6.1.0.crate) = ccd3e434c16f0164124ade12dcdee324fcc3dafb1cad0c7f1d8c2451a1aa6886 +SIZE (rust/crates/lexical-6.1.0.crate) = 24280 +SHA256 (rust/crates/lexical-core-0.8.3.crate) = 92912c4af2e7d9075be3e5e3122c4d7263855fa6cce34fbece4dd08e5884624d +SIZE (rust/crates/lexical-core-0.8.3.crate) = 26779 +SHA256 (rust/crates/lexical-parse-float-0.8.3.crate) = f518eed87c3be6debe6d26b855c97358d8a11bf05acec137e5f53080f5ad2dd8 +SIZE (rust/crates/lexical-parse-float-0.8.3.crate) = 180161 +SHA256 (rust/crates/lexical-parse-integer-0.8.3.crate) = afc852ec67c6538bbb2b9911116a385b24510e879a69ab516e6a151b15a79168 +SIZE (rust/crates/lexical-parse-integer-0.8.3.crate) = 33597 +SHA256 (rust/crates/lexical-util-0.8.3.crate) = c72a9d52c5c4e62fa2cdc2cb6c694a39ae1382d9c2a17a466f18e272a0930eb1 +SIZE (rust/crates/lexical-util-0.8.3.crate) = 85091 +SHA256 (rust/crates/lexical-write-float-0.8.4.crate) = 8a89ec1d062e481210c309b672f73a0567b7855f21e7d2fae636df44d12e97f9 +SIZE (rust/crates/lexical-write-float-0.8.4.crate) = 100173 +SHA256 (rust/crates/lexical-write-integer-0.8.3.crate) = 094060bd2a7c2ff3a16d5304a6ae82727cb3cc9d1c70f813cc73f744c319337e +SIZE (rust/crates/lexical-write-integer-0.8.3.crate) = 55266 SHA256 (rust/crates/libc-0.2.106.crate) = a60553f9a9e039a333b4e9b20573b9e9b9c0bb3a11e201ccc48ef4283456d673 SIZE (rust/crates/libc-0.2.106.crate) = 554765 SHA256 (rust/crates/libffi-2.0.0.crate) = 1a1a541960580e84812cac19ec26926e883520bda211397a1f8c223993be6f20 @@ -391,8 +399,8 @@ SHA256 (rust/crates/libloading-0.7.3.crate) = 
efbc0f03f9a775e9f6aed295c6a1ba2253 SIZE (rust/crates/libloading-0.7.3.crate) = 27378 SHA256 (rust/crates/libm-0.2.1.crate) = c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a SIZE (rust/crates/libm-0.2.1.crate) = 111906 -SHA256 (rust/crates/libsqlite3-sys-0.22.2.crate) = 290b64917f8b0cb885d9de0f9959fe1f775d7fa12f1da2db9001c1c8ab60f89d -SIZE (rust/crates/libsqlite3-sys-0.22.2.crate) = 2345859 +SHA256 (rust/crates/libsqlite3-sys-0.24.1.crate) = cb644c388dfaefa18035c12614156d285364769e818893da0dda9030c80ad2ba +SIZE (rust/crates/libsqlite3-sys-0.24.1.crate) = 4769388 SHA256 (rust/crates/linked-hash-map-0.5.4.crate) = 7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3 SIZE (rust/crates/linked-hash-map-0.5.4.crate) = 16166 SHA256 (rust/crates/lock_api-0.4.6.crate) = 88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b @@ -447,8 +455,8 @@ SHA256 (rust/crates/notify-5.0.0-pre.12.crate) = 20a629259bb2c87a884bb76f6086c86 SIZE (rust/crates/notify-5.0.0-pre.12.crate) = 52799 SHA256 (rust/crates/ntapi-0.3.6.crate) = 3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44 SIZE (rust/crates/ntapi-0.3.6.crate) = 127221 -SHA256 (rust/crates/num-bigint-0.2.6.crate) = 090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304 -SIZE (rust/crates/num-bigint-0.2.6.crate) = 87275 +SHA256 (rust/crates/num-bigint-0.4.3.crate) = f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f +SIZE (rust/crates/num-bigint-0.4.3.crate) = 97799 SHA256 (rust/crates/num-bigint-dig-0.7.0.crate) = 4547ee5541c18742396ae2c895d0717d0f886d8823b8399cdaf7b07d63ad0480 SIZE (rust/crates/num-bigint-dig-0.7.0.crate) = 123930 SHA256 (rust/crates/num-format-0.4.0.crate) = bafe4179722c2894288ee77a9f044f02811c86af699344c498b0840c698a2465 @@ -459,14 +467,14 @@ SHA256 (rust/crates/num-iter-0.1.42.crate) = b2021c8337a54d21aca0d59a92577a029af SIZE (rust/crates/num-iter-0.1.42.crate) = 10450 SHA256 (rust/crates/num-traits-0.2.14.crate) = 
9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290 SIZE (rust/crates/num-traits-0.2.14.crate) = 45476 -SHA256 (rust/crates/num_cpus-1.13.0.crate) = 05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3 -SIZE (rust/crates/num_cpus-1.13.0.crate) = 14704 +SHA256 (rust/crates/num_cpus-1.13.1.crate) = 19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1 +SIZE (rust/crates/num_cpus-1.13.1.crate) = 14752 SHA256 (rust/crates/objc-0.2.7.crate) = 915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1 SIZE (rust/crates/objc-0.2.7.crate) = 22036 SHA256 (rust/crates/objc_exception-0.1.2.crate) = ad970fb455818ad6cba4c122ad012fae53ae8b4795f86378bce65e4f6bab2ca4 SIZE (rust/crates/objc_exception-0.1.2.crate) = 2251 -SHA256 (rust/crates/once_cell-1.9.0.crate) = da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5 -SIZE (rust/crates/once_cell-1.9.0.crate) = 30702 +SHA256 (rust/crates/once_cell-1.10.0.crate) = 87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9 +SIZE (rust/crates/once_cell-1.10.0.crate) = 30414 SHA256 (rust/crates/opaque-debug-0.3.0.crate) = 624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5 SIZE (rust/crates/opaque-debug-0.3.0.crate) = 5767 SHA256 (rust/crates/openssl-probe-0.1.5.crate) = ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf @@ -477,34 +485,42 @@ SHA256 (rust/crates/os_str_bytes-6.0.0.crate) = 8e22443d1643a904602595ba1cd8f7d8 SIZE (rust/crates/os_str_bytes-6.0.0.crate) = 21046 SHA256 (rust/crates/output_vt100-0.1.2.crate) = 53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9 SIZE (rust/crates/output_vt100-0.1.2.crate) = 4062 -SHA256 (rust/crates/owning_ref-0.4.1.crate) = 6ff55baddef9e4ad00f88b6c743a2a8062d4c6ade126c2a528644b8e444d52ce -SIZE (rust/crates/owning_ref-0.4.1.crate) = 12658 SHA256 (rust/crates/p256-0.9.0.crate) = d053368e1bae4c8a672953397bd1bd7183dde1c72b0b7612a15719173148d186 SIZE (rust/crates/p256-0.9.0.crate) = 59435 SHA256 
(rust/crates/p384-0.8.0.crate) = f23bc88c404ccc881c8a1ad62ba5cd7d336a64ecbf46de4874f2ad955f67b157 SIZE (rust/crates/p384-0.8.0.crate) = 8573 SHA256 (rust/crates/parking_lot-0.11.2.crate) = 7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99 SIZE (rust/crates/parking_lot-0.11.2.crate) = 39869 +SHA256 (rust/crates/parking_lot-0.12.0.crate) = 87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58 +SIZE (rust/crates/parking_lot-0.12.0.crate) = 39761 SHA256 (rust/crates/parking_lot_core-0.8.5.crate) = d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216 SIZE (rust/crates/parking_lot_core-0.8.5.crate) = 32466 +SHA256 (rust/crates/parking_lot_core-0.9.1.crate) = 28141e0cc4143da2443301914478dc976a61ffdb3f043058310c70df2fed8954 +SIZE (rust/crates/parking_lot_core-0.9.1.crate) = 32234 SHA256 (rust/crates/path-clean-0.1.0.crate) = ecba01bf2678719532c5e3059e0b5f0811273d94b397088b82e3bd0a78c78fdd SIZE (rust/crates/path-clean-0.1.0.crate) = 4909 +SHA256 (rust/crates/path-dedot-3.0.16.crate) = f326e2a3331685a5e3d4633bb9836bd92126e08037cb512252f3612f616a0b28 +SIZE (rust/crates/path-dedot-3.0.16.crate) = 6169 SHA256 (rust/crates/pem-rfc7468-0.2.4.crate) = 84e93a3b1cc0510b03020f33f21e62acdde3dcaef432edc95bea377fbd4c2cd4 SIZE (rust/crates/pem-rfc7468-0.2.4.crate) = 22137 SHA256 (rust/crates/percent-encoding-2.1.0.crate) = d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e SIZE (rust/crates/percent-encoding-2.1.0.crate) = 9748 SHA256 (rust/crates/pest-2.1.3.crate) = 10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53 SIZE (rust/crates/pest-2.1.3.crate) = 77986 -SHA256 (rust/crates/petgraph-0.5.1.crate) = 467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7 -SIZE (rust/crates/petgraph-0.5.1.crate) = 147173 -SHA256 (rust/crates/phf-0.8.0.crate) = 3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12 -SIZE (rust/crates/phf-0.8.0.crate) = 3902 +SHA256 (rust/crates/petgraph-0.6.0.crate) = 
4a13a2fa9d0b63e5f22328828741e523766fff0ee9e779316902290dff3f824f +SIZE (rust/crates/petgraph-0.6.0.crate) = 182063 +SHA256 (rust/crates/phf-0.10.1.crate) = fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259 +SIZE (rust/crates/phf-0.10.1.crate) = 5406 SHA256 (rust/crates/phf_generator-0.8.0.crate) = 17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526 SIZE (rust/crates/phf_generator-0.8.0.crate) = 7604 -SHA256 (rust/crates/phf_macros-0.8.0.crate) = 7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c -SIZE (rust/crates/phf_macros-0.8.0.crate) = 5814 +SHA256 (rust/crates/phf_generator-0.10.0.crate) = 5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6 +SIZE (rust/crates/phf_generator-0.10.0.crate) = 7525 +SHA256 (rust/crates/phf_macros-0.10.0.crate) = 58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0 +SIZE (rust/crates/phf_macros-0.10.0.crate) = 3552 SHA256 (rust/crates/phf_shared-0.8.0.crate) = c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7 SIZE (rust/crates/phf_shared-0.8.0.crate) = 2860 +SHA256 (rust/crates/phf_shared-0.10.0.crate) = b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096 +SIZE (rust/crates/phf_shared-0.10.0.crate) = 4095 SHA256 (rust/crates/pin-project-1.0.8.crate) = 576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08 SIZE (rust/crates/pin-project-1.0.8.crate) = 54991 SHA256 (rust/crates/pin-project-internal-1.0.8.crate) = 6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389 @@ -603,8 +619,8 @@ SHA256 (rust/crates/ron-0.7.0.crate) = 1b861ecaade43ac97886a512b360d01d66be9f41f SIZE (rust/crates/ron-0.7.0.crate) = 45492 SHA256 (rust/crates/rsa-0.5.0.crate) = e05c2603e2823634ab331437001b411b9ed11660fbc4066f3908c84a9439260d SIZE (rust/crates/rsa-0.5.0.crate) = 52584 -SHA256 (rust/crates/rusqlite-0.25.4.crate) = 5c4b1eaf239b47034fb450ee9cdedd7d0226571689d8823030c4b6c2cb407152 -SIZE (rust/crates/rusqlite-0.25.4.crate) = 129773 +SHA256 
(rust/crates/rusqlite-0.27.0.crate) = 85127183a999f7db96d1a976a309eebbfb6ea3b0b400ddd8340190129de6eb7a +SIZE (rust/crates/rusqlite-0.27.0.crate) = 128298 SHA256 (rust/crates/rustc-hash-1.1.0.crate) = 08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2 SIZE (rust/crates/rustc-hash-1.1.0.crate) = 9331 SHA256 (rust/crates/rustc_version-0.2.3.crate) = 138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a @@ -661,6 +677,8 @@ SHA256 (rust/crates/serde_urlencoded-0.7.1.crate) = d3491c14715ca2294c4d6a88f15e SIZE (rust/crates/serde_urlencoded-0.7.1.crate) = 12822 SHA256 (rust/crates/sha-1-0.9.8.crate) = 99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6 SIZE (rust/crates/sha-1-0.9.8.crate) = 14029 +SHA256 (rust/crates/sha-1-0.10.0.crate) = 028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f +SIZE (rust/crates/sha-1-0.10.0.crate) = 12239 SHA256 (rust/crates/sha2-0.9.9.crate) = 4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800 SIZE (rust/crates/sha2-0.9.9.crate) = 22247 SHA256 (rust/crates/sha2-0.10.1.crate) = 99c3bd8169c58782adad9290a9af5939994036b76187f7b4f0e6de91dbbfc0ec @@ -691,14 +709,12 @@ SHA256 (rust/crates/spirv-0.2.0+1.5.4.crate) = 246bfa38fe3db3f1dfc8ca5a2cdeb7348 SIZE (rust/crates/spirv-0.2.0+1.5.4.crate) = 30491 SHA256 (rust/crates/spki-0.4.1.crate) = 5c01a0c15da1b0b0e1494112e7af814a678fec9bd157881b49beac661e9b6f32 SIZE (rust/crates/spki-0.4.1.crate) = 8055 -SHA256 (rust/crates/stable_deref_trait-1.2.0.crate) = a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3 -SIZE (rust/crates/stable_deref_trait-1.2.0.crate) = 8054 SHA256 (rust/crates/static_assertions-1.1.0.crate) = a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f SIZE (rust/crates/static_assertions-1.1.0.crate) = 18480 SHA256 (rust/crates/str-buf-1.0.5.crate) = d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a SIZE (rust/crates/str-buf-1.0.5.crate) = 3852 -SHA256 
(rust/crates/string_cache-0.8.2.crate) = 923f0f39b6267d37d23ce71ae7235602134b250ace715dd2c90421998ddac0c6 -SIZE (rust/crates/string_cache-0.8.2.crate) = 15770 +SHA256 (rust/crates/string_cache-0.8.3.crate) = 33994d0838dc2d152d17a62adf608a869b5e846b65b389af7f3dbc1de45c5b26 +SIZE (rust/crates/string_cache-0.8.3.crate) = 16261 SHA256 (rust/crates/string_cache_codegen-0.5.1.crate) = f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97 SIZE (rust/crates/string_cache_codegen-0.5.1.crate) = 8243 SHA256 (rust/crates/string_enum-0.3.1.crate) = f584cc881e9e5f1fd6bf827b0444aa94c30d8fe6378cf241071b5f5700b2871f @@ -711,50 +727,50 @@ SHA256 (rust/crates/subtle-2.4.1.crate) = 6bdef32e8150c2a081110b42772ffe7d7c9032 SIZE (rust/crates/subtle-2.4.1.crate) = 12630 SHA256 (rust/crates/swc_atoms-0.2.9.crate) = 9f5229fe227ff0060e13baa386d6e368797700eab909523f730008d191ee53ae SIZE (rust/crates/swc_atoms-0.2.9.crate) = 4959 -SHA256 (rust/crates/swc_bundler-0.107.0.crate) = c5b6761f9b57072658a42ff25304a7edc7f8444024726ed27d5861e4644c1ffe -SIZE (rust/crates/swc_bundler-0.107.0.crate) = 68957 -SHA256 (rust/crates/swc_common-0.17.5.crate) = dfac713c943b08c8e364801b24a2ea24ea657eadf22826840aa858eccd04d828 -SIZE (rust/crates/swc_common-0.17.5.crate) = 83166 -SHA256 (rust/crates/swc_ecma_ast-0.65.3.crate) = ff1aefdc512c8dfefa1de793c0e62a192bd74a7fa7e5affe3f08885751c127ee -SIZE (rust/crates/swc_ecma_ast-0.65.3.crate) = 22171 -SHA256 (rust/crates/swc_ecma_codegen-0.90.0.crate) = 7cf4da57030370c49c343e536d3c7ab66dca3bd8da000b6040884fc6ac689241 -SIZE (rust/crates/swc_ecma_codegen-0.90.0.crate) = 41781 -SHA256 (rust/crates/swc_ecma_codegen_macros-0.6.0.crate) = bdbf826c739281cdb3b3c23883fd1a7586ea1c15b1287530e7123a7fad8f0e25 -SIZE (rust/crates/swc_ecma_codegen_macros-0.6.0.crate) = 2986 -SHA256 (rust/crates/swc_ecma_dep_graph-0.59.0.crate) = c8ccd6ffe5f3c66da6713d97225561dd85c49d275343b787fc603193ec691955 -SIZE (rust/crates/swc_ecma_dep_graph-0.59.0.crate) = 5481 -SHA256 
(rust/crates/swc_ecma_loader-0.28.0.crate) = be60d3b599557e0b49d06e9cad351ec196e2ab9e9a369a0780f000a47ab58404 -SIZE (rust/crates/swc_ecma_loader-0.28.0.crate) = 8123 -SHA256 (rust/crates/swc_ecma_parser-0.88.3.crate) = bfd30c93f08afdf29226b5695e45aadcc6ce452470cc63ea87a7eb53d29bb02b -SIZE (rust/crates/swc_ecma_parser-0.88.3.crate) = 125620 -SHA256 (rust/crates/swc_ecma_transforms-0.117.0.crate) = b1fa132c1a736c2c61736958f1102249348a4cc911c7f60e3a6255aa49c1c03e -SIZE (rust/crates/swc_ecma_transforms-0.117.0.crate) = 21918 -SHA256 (rust/crates/swc_ecma_transforms_base-0.58.3.crate) = 9476fc0cd42cfc262764a21033a4b2cf5ee9b67d207007e0515af14f9e2aa253 -SIZE (rust/crates/swc_ecma_transforms_base-0.58.3.crate) = 98120 -SHA256 (rust/crates/swc_ecma_transforms_classes-0.46.0.crate) = ac8983235c6902879b65dcb1003d4084adf094408c96d94d62d3f33f44c3fa8e -SIZE (rust/crates/swc_ecma_transforms_classes-0.46.0.crate) = 4696 +SHA256 (rust/crates/swc_bundler-0.127.0.crate) = 27236918ff72391d5a302763eb64c30069bd073eff8db44954d3c9c65387b7e9 +SIZE (rust/crates/swc_bundler-0.127.0.crate) = 71090 +SHA256 (rust/crates/swc_common-0.17.18.crate) = 278ad1cbb3fb3b2686c86a7dd5f307ef791918d249a6da60fa6cd3388f4c6a78 +SIZE (rust/crates/swc_common-0.17.18.crate) = 85137 +SHA256 (rust/crates/swc_ecma_ast-0.71.0.crate) = 8ae7b943caae6d3fbae0534ce2df9866efa3d0415199ce7d20c6ef7e4e0b233d +SIZE (rust/crates/swc_ecma_ast-0.71.0.crate) = 23689 +SHA256 (rust/crates/swc_ecma_codegen-0.98.1.crate) = a6382f90a3e5ea88e9fe73c1e0afcf31fe5687f78994b39246c2c2bc8bbe51d8 +SIZE (rust/crates/swc_ecma_codegen-0.98.1.crate) = 45336 +SHA256 (rust/crates/swc_ecma_codegen_macros-0.7.0.crate) = 59949619b2ef45eedb6c399d05f2c3c7bc678b5074b3103bb670f9e05bb99042 +SIZE (rust/crates/swc_ecma_codegen_macros-0.7.0.crate) = 2988 +SHA256 (rust/crates/swc_ecma_dep_graph-0.67.0.crate) = 357351d97268fce258c4b2b684ad1143e37c087a2b8314c48dd8e3cd6b1fd138 +SIZE (rust/crates/swc_ecma_dep_graph-0.67.0.crate) = 5900 +SHA256 
(rust/crates/swc_ecma_loader-0.29.0.crate) = f9ab69df5d4de425833e02de111f14b5544b39ad9c9b82c97e4835fc55c8f1b6 +SIZE (rust/crates/swc_ecma_loader-0.29.0.crate) = 8069 +SHA256 (rust/crates/swc_ecma_parser-0.95.1.crate) = 6bbca18d756dddd0a87e101dd07157cd466a22787e9b5447ab85da2faa352bd8 +SIZE (rust/crates/swc_ecma_parser-0.95.1.crate) = 131429 +SHA256 (rust/crates/swc_ecma_transforms-0.136.0.crate) = b6ed0ab832bbd43221108c4777cab793f888f8810f3e2d80069ba95ba2813136 +SIZE (rust/crates/swc_ecma_transforms-0.136.0.crate) = 1772 +SHA256 (rust/crates/swc_ecma_transforms_base-0.70.1.crate) = 866d27f3acf3686d09813684438a60be0e3333baa0b716eca774748ca4691e44 +SIZE (rust/crates/swc_ecma_transforms_base-0.70.1.crate) = 99542 +SHA256 (rust/crates/swc_ecma_transforms_classes-0.58.0.crate) = 3c16df5d4468e8f54b89eccf0876337c4c672b6434092ec83e71e7c678d1fdd3 +SIZE (rust/crates/swc_ecma_transforms_classes-0.58.0.crate) = 4698 SHA256 (rust/crates/swc_ecma_transforms_macros-0.3.0.crate) = 18712e4aab969c6508dff3540ade6358f1e013464aa58b3d30da2ab2d9fcbbed SIZE (rust/crates/swc_ecma_transforms_macros-0.3.0.crate) = 4357 -SHA256 (rust/crates/swc_ecma_transforms_optimization-0.87.0.crate) = e074149dd5e969d35a790851b47b0f76700b14fcfda1c05e15751c1458e2dd38 -SIZE (rust/crates/swc_ecma_transforms_optimization-0.87.0.crate) = 69731 -SHA256 (rust/crates/swc_ecma_transforms_proposal-0.77.0.crate) = 048ac8ea82e02fa9a54b9aa448dc5d15a8e994304364fcd8c4e2f650572c9141 -SIZE (rust/crates/swc_ecma_transforms_proposal-0.77.0.crate) = 14800 -SHA256 (rust/crates/swc_ecma_transforms_react-0.80.0.crate) = 9fb5b7c98597bf41d1503ca4039be5445fd02e7aa381ae520d1c78a8a370f7f5 -SIZE (rust/crates/swc_ecma_transforms_react-0.80.0.crate) = 34149 -SHA256 (rust/crates/swc_ecma_transforms_typescript-0.82.0.crate) = f7859a18a33f751d488fbc1b7a0073fb08c69d794ad1f8daa2da47bfda2d9242 -SIZE (rust/crates/swc_ecma_transforms_typescript-0.82.0.crate) = 74012 -SHA256 (rust/crates/swc_ecma_utils-0.65.3.crate) = 
b462ac7dd5340544e7a12965bb7fbbbf9db8b26c1b32159b43c4b2430fed3fc8 -SIZE (rust/crates/swc_ecma_utils-0.65.3.crate) = 22980 -SHA256 (rust/crates/swc_ecma_visit-0.51.1.crate) = 32aa4c53401d1390aa45043e0a69d52e1a04ef45845e19b55c484462e6dcd048 -SIZE (rust/crates/swc_ecma_visit-0.51.1.crate) = 9036 -SHA256 (rust/crates/swc_ecmascript-0.114.3.crate) = 988f15eb7dcd2a9e4c93e11d27dbcdfbf9e350776af36f51310e3d2ee91635ad -SIZE (rust/crates/swc_ecmascript-0.114.3.crate) = 1523 +SHA256 (rust/crates/swc_ecma_transforms_optimization-0.106.0.crate) = dca63fd94ef598a08aa0a8bb46506896efe93acf5e0e6e03fef7b02bab094285 +SIZE (rust/crates/swc_ecma_transforms_optimization-0.106.0.crate) = 53900 +SHA256 (rust/crates/swc_ecma_transforms_proposal-0.91.0.crate) = 2d5ff5321ecdd0a3e620878e02452a6475b9ffdcaf75a2cf9636c2d31bb85fe0 +SIZE (rust/crates/swc_ecma_transforms_proposal-0.91.0.crate) = 14182 +SHA256 (rust/crates/swc_ecma_transforms_react-0.98.0.crate) = 4fd0f164c04335aa8b7c09004dc85643eb47f2aad0cd0c8975bb93f87e9b3796 +SIZE (rust/crates/swc_ecma_transforms_react-0.98.0.crate) = 34138 +SHA256 (rust/crates/swc_ecma_transforms_typescript-0.101.0.crate) = f9cf27bb12520f2540ceb71d42224246bc3f1955a1c85484974fca482ed54a0c +SIZE (rust/crates/swc_ecma_transforms_typescript-0.101.0.crate) = 31479 +SHA256 (rust/crates/swc_ecma_utils-0.74.0.crate) = daa3ba57f53fc15882d2ea288f9a4b6c3a6e97c015d7b9603035be424bc19007 +SIZE (rust/crates/swc_ecma_utils-0.74.0.crate) = 23477 +SHA256 (rust/crates/swc_ecma_visit-0.57.0.crate) = 7588bf6b02705a25356a130acdfec125b6a1dcd5390a5718082ae4f2ede85ee3 +SIZE (rust/crates/swc_ecma_visit-0.57.0.crate) = 9101 +SHA256 (rust/crates/swc_ecmascript-0.137.0.crate) = 9c7417fbd813809f69e34ec64c2047befc219e99fa25b0678870ca17ae3b5805 +SIZE (rust/crates/swc_ecmascript-0.137.0.crate) = 1548 SHA256 (rust/crates/swc_eq_ignore_macros-0.1.0.crate) = 8c8f200a2eaed938e7c1a685faaa66e6d42fa9e17da5f62572d3cbc335898f5e SIZE (rust/crates/swc_eq_ignore_macros-0.1.0.crate) = 2900 -SHA256 
(rust/crates/swc_fast_graph-0.4.0.crate) = 1d53bbcbb4b055c547f283af1f84211f425b95ac59e02d8b70c94b8a63a4704f -SIZE (rust/crates/swc_fast_graph-0.4.0.crate) = 6842 -SHA256 (rust/crates/swc_graph_analyzer-0.4.0.crate) = 83b42a8b13068dd90dec954ec44576d5922914687bc34277f3b0f8d0bbeb4e83 -SIZE (rust/crates/swc_graph_analyzer-0.4.0.crate) = 1971 +SHA256 (rust/crates/swc_fast_graph-0.5.0.crate) = 9860ef8ffc31eedf45bc39a60a2500838a331e3e687bc005fe69088f6a966460 +SIZE (rust/crates/swc_fast_graph-0.5.0.crate) = 6836 +SHA256 (rust/crates/swc_graph_analyzer-0.6.0.crate) = 67696e05cdf3efc1daded3b4803639da25fd9254ca6bae16539058197a411de8 +SIZE (rust/crates/swc_graph_analyzer-0.6.0.crate) = 1980 SHA256 (rust/crates/swc_macros_common-0.3.2.crate) = bf7c68e78ffbcba3d38abe6d0b76a0e1a37888b5c9301db3426537207090ada3 SIZE (rust/crates/swc_macros_common-0.3.2.crate) = 6625 SHA256 (rust/crates/swc_visit-0.3.0.crate) = e5c639379dd2a8a0221fa1e12fafbdd594ba53a0cace6560054da52409dfcc1a @@ -805,12 +821,12 @@ SHA256 (rust/crates/toml-0.5.8.crate) = a31142970826733df8241ef35dc040ef98c679ab SIZE (rust/crates/toml-0.5.8.crate) = 54219 SHA256 (rust/crates/tower-service-0.3.1.crate) = 360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6 SIZE (rust/crates/tower-service-0.3.1.crate) = 6299 -SHA256 (rust/crates/tracing-0.1.29.crate) = 375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105 -SIZE (rust/crates/tracing-0.1.29.crate) = 72444 -SHA256 (rust/crates/tracing-attributes-0.1.18.crate) = f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e -SIZE (rust/crates/tracing-attributes-0.1.18.crate) = 22779 -SHA256 (rust/crates/tracing-core-0.1.21.crate) = 1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4 -SIZE (rust/crates/tracing-core-0.1.21.crate) = 50683 +SHA256 (rust/crates/tracing-0.1.32.crate) = 4a1bdf54a7c28a2bbf701e1d2233f6c77f473486b94bee4f9678da5a148dca7f +SIZE (rust/crates/tracing-0.1.32.crate) = 77985 +SHA256 
(rust/crates/tracing-attributes-0.1.20.crate) = 2e65ce065b4b5c53e73bb28912318cb8c9e9ad3921f1d669eb0e68b4c8143a2b +SIZE (rust/crates/tracing-attributes-0.1.20.crate) = 26316 +SHA256 (rust/crates/tracing-core-0.1.23.crate) = aa31669fa42c09c34d94d8165dd2012e8ff3c66aca50f3bb226b68f216f2706c +SIZE (rust/crates/tracing-core-0.1.23.crate) = 53204 SHA256 (rust/crates/trust-dns-client-0.20.3.crate) = fea72219106741b56ebab5e58e506beb657e1ed5d568a987141a9659124474f9 SIZE (rust/crates/trust-dns-client-0.20.3.crate) = 75793 SHA256 (rust/crates/trust-dns-proto-0.20.3.crate) = ad0d7f5db438199a6e2609debe3f69f808d074e0a2888ee0bccb45fe234d03f4 @@ -847,6 +863,8 @@ SHA256 (rust/crates/unicase-2.6.0.crate) = 50f37be617794602aabbeee0be4f259dc1778 SIZE (rust/crates/unicase-2.6.0.crate) = 23478 SHA256 (rust/crates/unicode-bidi-0.3.7.crate) = 1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f SIZE (rust/crates/unicode-bidi-0.3.7.crate) = 33759 +SHA256 (rust/crates/unicode-id-0.3.0.crate) = 4285d92be83dfbc8950a2601178b89ed36f979ebf51bfcf7b272b17001184e6c +SIZE (rust/crates/unicode-id-0.3.0.crate) = 15119 SHA256 (rust/crates/unicode-normalization-0.1.19.crate) = d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9 SIZE (rust/crates/unicode-normalization-0.1.19.crate) = 107353 SHA256 (rust/crates/unicode-segmentation-1.8.0.crate) = 8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b @@ -931,16 +949,28 @@ SHA256 (rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.crate) = 712e227841d057c1 SIZE (rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.crate) = 2947998 SHA256 (rust/crates/windows-sys-0.28.0.crate) = 82ca39602d5cbfa692c4b67e3bcbb2751477355141c1ed434c94da4186836ff6 SIZE (rust/crates/windows-sys-0.28.0.crate) = 3075898 +SHA256 (rust/crates/windows-sys-0.32.0.crate) = 3df6e476185f92a12c072be4a189a0210dcdcf512a1891d6dff9edb874deadc6 +SIZE (rust/crates/windows-sys-0.32.0.crate) = 3361554 SHA256 (rust/crates/windows_aarch64_msvc-0.28.0.crate) = 
52695a41e536859d5308cc613b4a022261a274390b25bd29dfff4bf08505f3c2 SIZE (rust/crates/windows_aarch64_msvc-0.28.0.crate) = 669636 +SHA256 (rust/crates/windows_aarch64_msvc-0.32.0.crate) = d8e92753b1c443191654ec532f14c199742964a061be25d77d7a96f09db20bf5 +SIZE (rust/crates/windows_aarch64_msvc-0.32.0.crate) = 673810 SHA256 (rust/crates/windows_i686_gnu-0.28.0.crate) = f54725ac23affef038fecb177de6c9bf065787c2f432f79e3c373da92f3e1d8a SIZE (rust/crates/windows_i686_gnu-0.28.0.crate) = 774446 +SHA256 (rust/crates/windows_i686_gnu-0.32.0.crate) = 6a711c68811799e017b6038e0922cb27a5e2f43a2ddb609fe0b6f3eeda9de615 +SIZE (rust/crates/windows_i686_gnu-0.32.0.crate) = 772251 SHA256 (rust/crates/windows_i686_msvc-0.28.0.crate) = 51d5158a43cc43623c0729d1ad6647e62fa384a3d135fd15108d37c683461f64 SIZE (rust/crates/windows_i686_msvc-0.28.0.crate) = 732280 +SHA256 (rust/crates/windows_i686_msvc-0.32.0.crate) = 146c11bb1a02615db74680b32a68e2d61f553cc24c4eb5b4ca10311740e44172 +SIZE (rust/crates/windows_i686_msvc-0.32.0.crate) = 733422 SHA256 (rust/crates/windows_x86_64_gnu-0.28.0.crate) = bc31f409f565611535130cfe7ee8e6655d3fa99c1c61013981e491921b5ce954 SIZE (rust/crates/windows_x86_64_gnu-0.28.0.crate) = 743221 +SHA256 (rust/crates/windows_x86_64_gnu-0.32.0.crate) = c912b12f7454c6620635bbff3450962753834be2a594819bd5e945af18ec64bc +SIZE (rust/crates/windows_x86_64_gnu-0.32.0.crate) = 744209 SHA256 (rust/crates/windows_x86_64_msvc-0.28.0.crate) = 3f2b8c7cbd3bfdddd9ab98769f9746a7fad1bca236554cd032b78d768bc0e89f SIZE (rust/crates/windows_x86_64_msvc-0.28.0.crate) = 668950 +SHA256 (rust/crates/windows_x86_64_msvc-0.32.0.crate) = 504a2476202769977a040c6364301a3f65d0cc9e3fb08600b2bda150a0488316 +SIZE (rust/crates/windows_x86_64_msvc-0.32.0.crate) = 669835 SHA256 (rust/crates/winreg-0.6.2.crate) = b2986deb581c4fe11b621998a5e53361efe6b48a151178d0cd9eeffa4dc6acc9 SIZE (rust/crates/winreg-0.6.2.crate) = 19338 SHA256 (rust/crates/winreg-0.10.1.crate) = 
80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d @@ -957,5 +987,5 @@ SHA256 (rust/crates/zstd-safe-4.1.3+zstd.1.5.1.crate) = e99d81b99fb3c2c2c794e3fe SIZE (rust/crates/zstd-safe-4.1.3+zstd.1.5.1.crate) = 16677 SHA256 (rust/crates/zstd-sys-1.6.2+zstd.1.5.1.crate) = 2daf2f248d9ea44454bfcb2516534e8b8ad2fc91bf818a1885495fc42bc8ac9f SIZE (rust/crates/zstd-sys-1.6.2+zstd.1.5.1.crate) = 718776 -SHA256 (denoland-deno-v1.20.1_GH0.tar.gz) = c5d0dc71dfa300ce14a8057fd4c0d16b86e963fff34c48ea28782521ce1144fc -SIZE (denoland-deno-v1.20.1_GH0.tar.gz) = 8347361 +SHA256 (denoland-deno-v1.20.3_GH0.tar.gz) = e7e7f99466469143dbf8f00a3d975708cec199b957f913e7ac48be61162a8ef6 +SIZE (denoland-deno-v1.20.3_GH0.tar.gz) = 8352616 diff --git a/www/epiphany/Makefile b/www/epiphany/Makefile index ee429ee1448..1bfd6c984cd 100644 --- a/www/epiphany/Makefile +++ b/www/epiphany/Makefile @@ -2,6 +2,7 @@ PORTNAME= epiphany PORTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= www gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/www/flickcurl/Makefile b/www/flickcurl/Makefile index 8f540758aa8..7d3c7c0066d 100644 --- a/www/flickcurl/Makefile +++ b/www/flickcurl/Makefile @@ -2,7 +2,7 @@ PORTNAME= flickcurl PORTVERSION= 1.26 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= www MASTER_SITES= http://download.dajobe.org/flickcurl/ diff --git a/www/gitlab-ce/Makefile b/www/gitlab-ce/Makefile index 73bc2237e14..03f07c4e027 100644 --- a/www/gitlab-ce/Makefile +++ b/www/gitlab-ce/Makefile @@ -1,7 +1,7 @@ # Created by: Torsten Zuehlsdorff PORTNAME= gitlab-ce -PORTVERSION= 14.8.4 +PORTVERSION= 14.9.1 PORTREVISION= 0 CATEGORIES= www devel @@ -18,18 +18,19 @@ LICENSE_FILE= ${WRKSRC}/LICENSE MY_DEPENDS= git>=2.32.0:devel/git \ gitaly>=${PORTVERSION}:devel/gitaly \ gitlab-elasticsearch-indexer>=2.17.0:textproc/gitlab-elasticsearch-indexer \ - gitlab-agent>=14.8.1:net/gitlab-agent \ - gitlab-pages>=1.54.0:www/gitlab-pages \ - 
gitlab-shell>=13.23.2:devel/gitlab-shell \ + gitlab-agent>=14.9.0:net/gitlab-agent \ + gitlab-pages>=1.56.0:www/gitlab-pages \ + gitlab-shell>=13.24.0:devel/gitlab-shell \ gitlab-workhorse>=${PORTVERSION}:www/gitlab-workhorse \ redis>=6.0.14:databases/redis \ yarn>=1.10.0:www/yarn \ gtar>0:archivers/gtar \ bash>0:shells/bash \ rubygem-rails61>=6.1.4.6<6.1.5:www/rubygem-rails61 \ - rubygem-bootsnap>=1.9.1<1.11:devel/rubygem-bootsnap \ + rubygem-bootsnap>=1.9.1<1.12:devel/rubygem-bootsnap \ rubygem-responders-rails61>=3.0<4.0:www/rubygem-responders-rails61 \ rubygem-sprockets3>=3.7<3.8:devel/rubygem-sprockets3 \ + rubygem-view_component-rails61>=2.50.0<2.51:devel/rubygem-view_component-rails61 \ rubygem-default_value_for>=3.4.0<3.5.0:devel/rubygem-default_value_for \ rubygem-pg>=1.2.3<2:databases/rubygem-pg \ rubygem-rugged>=1.2<2:devel/rubygem-rugged \ @@ -118,7 +119,7 @@ MY_DEPENDS= git>=2.32.0:devel/git \ rubygem-deckar01-task_list=2.3.1:www/rubygem-deckar01-task_list \ rubygem-gitlab-markup>=1.8.0<1.9.0:textproc/rubygem-gitlab-markup \ rubygem-github-markup17>=1.7.0<1.8.0:textproc/rubygem-github-markup17 \ - rubygem-commonmarker>=0.23.2<0.24:textproc/rubygem-commonmarker \ + rubygem-commonmarker>=0.23.4<0.24:textproc/rubygem-commonmarker \ rubygem-kramdown>=2.3.1<2.4:textproc/rubygem-kramdown \ rubygem-redcloth>=4.3.2<4.4.0:www/rubygem-redcloth \ rubygem-rdoc63>=6.3.2<7:devel/rubygem-rdoc63 \ @@ -183,13 +184,16 @@ MY_DEPENDS= git>=2.32.0:devel/git \ rubygem-autoprefixer-rails1025>=10.2.5.1<10.2.6:textproc/rubygem-autoprefixer-rails1025 \ rubygem-terser>=1.0.2:textproc/rubygem-terser \ rubygem-addressable>=2.8<3:www/rubygem-addressable \ - rubygem-tanuki_emoji>=0.5<1:graphics/rubygem-tanuki_emoji \ + rubygem-tanuki_emoji>=0.6<1:graphics/rubygem-tanuki_emoji \ rubygem-gon-rails61>=6.4.0<6.5:www/rubygem-gon-rails61 \ rubygem-request_store>=1.5.1<2.0:devel/rubygem-request_store \ rubygem-base32>=0.3.0<0.4.0:converters/rubygem-base32 \ 
rubygem-gitlab-license>=2.1.0<2.2:devel/rubygem-gitlab-license \ rubygem-rack-attack>=6.3.0<6.4.0:www/rubygem-rack-attack \ rubygem-sentry-raven>=3.1<4.0:devel/rubygem-sentry-raven \ + rubygem-sentry-ruby>=5.1.1<5.2:devel/rubygem-sentry-ruby \ + rubygem-sentry-rails>=5.1.1<5.2:devel/rubygem-sentry-rails \ + rubygem-sentry-sidekiq>=5.1.1<5.2:devel/rubygem-sentry-sidekiq \ rubygem-pg_query>=2.1<3:databases/rubygem-pg_query \ rubygem-premailer-rails-rails61>=1.10.3<1.11.0:mail/rubygem-premailer-rails-rails61 \ rubygem-gitlab-labkit>=0.22.0<0.23:devel/rubygem-gitlab-labkit \ @@ -224,7 +228,7 @@ MY_DEPENDS= git>=2.32.0:devel/git \ rubygem-net-ntp>0:net/rubygem-net-ntp \ rubygem-ssh_data>=1.2<2:security/rubygem-ssh_data \ rubygem-spamcheck>=0.1.0<0.2:net/rubygem-spamcheck \ - rubygem-gitaly>=14.8.0.pre.rc1<14.9.0:net/rubygem-gitaly \ + rubygem-gitaly>=14.9.0.pre.rc4<14.10.0:net/rubygem-gitaly \ rubygem-kas-grpc>=0.0.2,1<0.0.3,1:net/rubygem-kas-grpc \ rubygem-grpc142>=1.42.0<1.43:net/rubygem-grpc142 \ rubygem-google-protobuf>=3.19.0<3.20:devel/rubygem-google-protobuf \ @@ -252,7 +256,7 @@ MY_DEPENDS= git>=2.32.0:devel/git \ rubygem-webauthn>=2.3<3:security/rubygem-webauthn \ rubygem-ipaddress>=0.8.3<0.9:net/rubygem-ipaddress \ rubygem-parslet1>=1.8<2:textproc/rubygem-parslet1 \ - rubygem-ipynbdiff>=0.3.8<0.3.9:devel/rubygem-ipynbdiff \ + rubygem-ipynbdiff>=0.4.4<0.4.5:devel/rubygem-ipynbdiff \ rubygem-parser>=0:devel/rubygem-parser BUILD_DEPENDS= gem:devel/ruby-gems \ @@ -274,7 +278,7 @@ USE_GITLAB= yes GL_ACCOUNT= gitlab-org GL_PROJECT= gitlab-foss # Find the here: https://gitlab.com/gitlab-org/gitlab-foss/-/tags -GL_COMMIT= ef2df7d848f4b13324ed2202fc17b983f34b154d +GL_COMMIT= 5b713e1e9a309d1afff4c7068d1e03fbd6a5f7f7 USERS= git GROUPS= git diff --git a/www/gitlab-ce/distinfo b/www/gitlab-ce/distinfo index c731f9dd4f7..9f1230cd030 100644 --- a/www/gitlab-ce/distinfo +++ b/www/gitlab-ce/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1647759469 -SHA256 
(gitlab-org-gitlab-foss-ef2df7d848f4b13324ed2202fc17b983f34b154d_GL0.tar.gz) = a17424d58047178e5b6cb3cd87aa0e5203d6c900872466467598b935575758c3 -SIZE (gitlab-org-gitlab-foss-ef2df7d848f4b13324ed2202fc17b983f34b154d_GL0.tar.gz) = 102525535 +TIMESTAMP = 1648219074 +SHA256 (gitlab-org-gitlab-foss-5b713e1e9a309d1afff4c7068d1e03fbd6a5f7f7_GL0.tar.gz) = 9685e3cb51f4fc0b22ce5a9dc0bf25cc925c269c1346fd45693a3377756cc166 +SIZE (gitlab-org-gitlab-foss-5b713e1e9a309d1afff4c7068d1e03fbd6a5f7f7_GL0.tar.gz) = 102356177 diff --git a/www/gitlab-ce/files/patch-Gemfile b/www/gitlab-ce/files/patch-Gemfile index 8e0f96a93c6..fd27741726a 100644 --- a/www/gitlab-ce/files/patch-Gemfile +++ b/www/gitlab-ce/files/patch-Gemfile @@ -1,4 +1,4 @@ ---- Gemfile.orig 2022-02-21 12:45:48 UTC +--- Gemfile.orig 2022-03-21 09:13:50 UTC +++ Gemfile @@ -4,7 +4,7 @@ source 'https://rubygems.org' @@ -9,7 +9,7 @@ # Responders respond_to and respond_with gem 'responders', '~> 3.0' -@@ -89,7 +89,7 @@ gem 'gpgme', '~> 2.0.19' +@@ -91,7 +91,7 @@ gem 'gpgme', '~> 2.0.19' # GitLab fork with several improvements to original library. 
For full list of changes # see https://github.com/intridea/omniauth-ldap/compare/master...gitlabhq:master gem 'gitlab_omniauth-ldap', '~> 2.1.1', require: 'omniauth-ldap' @@ -18,7 +18,7 @@ # API gem 'grape', '~> 1.5.2' -@@ -100,7 +100,6 @@ gem 'rack-cors', '~> 1.0.6', require: 'rack/cors' +@@ -102,7 +102,6 @@ gem 'rack-cors', '~> 1.0.6', require: 'rack/cors' gem 'graphql', '~> 1.11.10' gem 'graphiql-rails', '~> 1.8' gem 'apollo_upload_server', '~> 2.1.0' @@ -26,7 +26,7 @@ gem 'graphlient', '~> 0.4.0' # Used by BulkImport feature (group::import) gem 'hashie' -@@ -328,115 +327,14 @@ gem 'batch-loader', '~> 2.0.1' +@@ -333,118 +332,14 @@ gem 'batch-loader', '~> 2.0.1' gem 'peek', '~> 1.1' # Snowplow events tracking @@ -77,7 +77,7 @@ - gem 'spring', '~> 2.1.0' - gem 'spring-commands-rspec', '~> 1.0.4' - -- gem 'gitlab-styles', '~> 6.6.0', require: false +- gem 'gitlab-styles', '~> 7.0.0', require: false - - gem 'haml_lint', '~> 0.36.0', require: false - gem 'bundler-audit', '~> 0.7.0.1', require: false @@ -96,14 +96,16 @@ - gem 'parallel', '~> 1.19', require: false - - gem 'test_file_finder', '~> 0.1.3' +- +- gem 'sigdump', '~> 0.2.4', require: 'sigdump/setup' -end - -group :development, :test, :danger do -- gem 'gitlab-dangerfiles', '~> 2.8.0', require: false +- gem 'gitlab-dangerfiles', '~> 2.11.0', require: false -end - -group :development, :test, :coverage do -- gem 'simplecov', '~> 0.18.5', require: false +- gem 'simplecov', '~> 0.21', require: false - gem 'simplecov-lcov', '~> 0.8.0', require: false - gem 'simplecov-cobertura', '~> 1.3.1', require: false - gem 'undercover', '~> 0.4.4', require: false @@ -121,6 +123,7 @@ - gem 'fuubar', '~> 2.2.0' - gem 'rspec-retry', '~> 0.6.1' - gem 'rspec_profiling', '~> 0.0.6' +- gem 'rspec-benchmark', '~> 0.6.0' - gem 'rspec-parameterized', require: false - - gem 'capybara', '~> 3.35.3' @@ -143,10 +146,9 @@ gem 'octokit', '~> 4.15' # Updating this gem version here is deprecated. 
See: -@@ -535,3 +433,6 @@ gem 'ipaddress', '~> 0.8.3' +@@ -543,3 +438,5 @@ gem 'ipaddress', '~> 0.8.3' gem 'parslet', '~> 1.8' - gem 'ipynbdiff', '0.3.8' + gem 'ipynbdiff', '0.4.4' + +gem 'parser' -+ diff --git a/www/gitlab-ce/files/patch-config_gitlab.yml.example b/www/gitlab-ce/files/patch-config_gitlab.yml.example index 2bcd5cf510f..c2e2433f9fa 100644 --- a/www/gitlab-ce/files/patch-config_gitlab.yml.example +++ b/www/gitlab-ce/files/patch-config_gitlab.yml.example @@ -1,6 +1,6 @@ ---- config/gitlab.yml.example.orig 2022-02-21 12:45:48 UTC +--- config/gitlab.yml.example.orig 2022-03-21 09:13:50 UTC +++ config/gitlab.yml.example -@@ -1156,14 +1156,14 @@ production: &base +@@ -1172,14 +1172,14 @@ production: &base # real path not the symlink. storages: # You must have at least a `default` storage path. default: @@ -18,7 +18,7 @@ # archive_permissions: 0640 # Permissions for the resulting backup.tar file (default: 0600) # keep_time: 604800 # default: 0 (forever) (in seconds) # pg_schema: public # default: nil, it means that all schemas will be backed up -@@ -1212,12 +1212,12 @@ production: &base +@@ -1228,12 +1228,12 @@ production: &base ## GitLab Shell settings gitlab_shell: @@ -34,7 +34,7 @@ # Git over HTTP upload_pack: true -@@ -1232,13 +1232,13 @@ production: &base +@@ -1248,13 +1248,13 @@ production: &base workhorse: # File that contains the secret key for verifying access for gitlab-workhorse. # Default is '.gitlab_workhorse_secret' relative to Rails.root (i.e. root of the GitLab app). 
@@ -50,7 +50,7 @@ # The URL to the external KAS API (used by the Kubernetes agents) # external_url: wss://kas.example.com -@@ -1251,13 +1251,13 @@ production: &base +@@ -1267,13 +1267,13 @@ production: &base ## GitLab Elasticsearch settings elasticsearch: @@ -66,7 +66,7 @@ ## Webpack settings # If enabled, this will tell rails to serve frontend assets from the webpack-dev-server running -@@ -1278,16 +1278,16 @@ production: &base +@@ -1294,16 +1294,16 @@ production: &base # Sidekiq exporter is webserver built in to Sidekiq to expose Prometheus metrics sidekiq_exporter: @@ -91,7 +91,7 @@ # Web exporter is a dedicated Rack server running alongside Puma to expose Prometheus metrics # It runs alongside the `/metrics` endpoints to ease the publish of metrics -@@ -1508,13 +1508,13 @@ test: +@@ -1524,13 +1524,13 @@ test: gitaly_address: unix:tmp/tests/gitaly/praefect.socket gitaly: diff --git a/www/gitlab-ce/pkg-message b/www/gitlab-ce/pkg-message index 075280813c2..39767293744 100644 --- a/www/gitlab-ce/pkg-message +++ b/www/gitlab-ce/pkg-message @@ -6,7 +6,7 @@ Gitlab was installed successfully. 
You now need to set up the various components of Gitlab, so please follow the instructions in the guide at: -https://gitlab.fechner.net/mfechner/Gitlab-docu/blob/master/install/14.8-freebsd.md +https://gitlab.fechner.net/mfechner/Gitlab-docu/blob/master/install/14.9-freebsd.md EOM type: install } @@ -20,7 +20,7 @@ EOM If you just installed an major upgrade of GitLab, for example you switched from 14.5.x to 14.6.x, please follow the instructions in the guide at: -https://gitlab.fechner.net/mfechner/Gitlab-docu/blob/master/update/14.7-14.8-freebsd.md +https://gitlab.fechner.net/mfechner/Gitlab-docu/blob/master/update/14.8-14.9-freebsd.md If you just installed an minor upgrade of GitLab please follow the instructions in the guide at: diff --git a/www/gitlab-pages/Makefile b/www/gitlab-pages/Makefile index 82760d8f4e1..cdc67d50d52 100644 --- a/www/gitlab-pages/Makefile +++ b/www/gitlab-pages/Makefile @@ -1,5 +1,5 @@ PORTNAME= gitlab-pages -PORTVERSION= 1.54.0 +PORTVERSION= 1.56.0 DISTVERSIONPREFIX= v CATEGORIES= www diff --git a/www/gitlab-pages/distinfo b/www/gitlab-pages/distinfo index 42de4519452..9830725b659 100644 --- a/www/gitlab-pages/distinfo +++ b/www/gitlab-pages/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1646774627 -SHA256 (go/www_gitlab-pages/gitlab-pages-v1.54.0/v1.54.0.mod) = 283add63c0a36e9e303f9f520eb37589dedb7591b2df50b29642df3ab15b6422 -SIZE (go/www_gitlab-pages/gitlab-pages-v1.54.0/v1.54.0.mod) = 1265 -SHA256 (go/www_gitlab-pages/gitlab-pages-v1.54.0/v1.54.0.zip) = a923582e9cc571aacf630a71b805066b9b8bd51682229a12f7bf0af2684dbe00 -SIZE (go/www_gitlab-pages/gitlab-pages-v1.54.0/v1.54.0.zip) = 352496 +TIMESTAMP = 1647950018 +SHA256 (go/www_gitlab-pages/gitlab-pages-v1.56.0/v1.56.0.mod) = 283add63c0a36e9e303f9f520eb37589dedb7591b2df50b29642df3ab15b6422 +SIZE (go/www_gitlab-pages/gitlab-pages-v1.56.0/v1.56.0.mod) = 1265 +SHA256 (go/www_gitlab-pages/gitlab-pages-v1.56.0/v1.56.0.zip) = ba4f3b6027d05493724d25ee293ecb879b177ffef117d467feef4f9134a7cab8 +SIZE 
(go/www_gitlab-pages/gitlab-pages-v1.56.0/v1.56.0.zip) = 360266 diff --git a/www/gitlab-workhorse/Makefile b/www/gitlab-workhorse/Makefile index f390ce3e434..6fd017a22e6 100644 --- a/www/gitlab-workhorse/Makefile +++ b/www/gitlab-workhorse/Makefile @@ -1,7 +1,7 @@ # Created by: Torsten Zuehlsdorff PORTNAME= gitlab-workhorse -PORTVERSION= 14.8.4 +PORTVERSION= 14.9.1 PORTREVISION= 0 CATEGORIES= www @@ -25,7 +25,7 @@ USE_GITLAB= yes GL_ACCOUNT= gitlab-org GL_PROJECT= gitlab-foss # Find the commit hash here: https://gitlab.com/gitlab-org/gitlab-foss/-/tags -GL_COMMIT= ef2df7d848f4b13324ed2202fc17b983f34b154d +GL_COMMIT= 5b713e1e9a309d1afff4c7068d1e03fbd6a5f7f7 # for go dependencies USE_GITHUB= nodefault diff --git a/www/gitlab-workhorse/distinfo b/www/gitlab-workhorse/distinfo index 199797464c3..08ecf9a844c 100644 --- a/www/gitlab-workhorse/distinfo +++ b/www/gitlab-workhorse/distinfo @@ -1,4 +1,4 @@ -TIMESTAMP = 1647759444 +TIMESTAMP = 1648219111 SHA256 (Azure-azure-pipeline-go-v0.2.3_GH0.tar.gz) = 99bd58f4a07dd02d9615e3638b3bb6dbfad80ef678ccdb8e17e3fa2b0fef343e SIZE (Azure-azure-pipeline-go-v0.2.3_GH0.tar.gz) = 17102 SHA256 (Azure-azure-storage-blob-go-v0.13.0_GH0.tar.gz) = 6bf7145210331efa3f0417f6684cf764c22743cf23122048ec136600daebf443 @@ -199,8 +199,8 @@ SHA256 (uber-jaeger-client-go-v2.27.0_GH0.tar.gz) = 7590acdefcbbf9553bd3415bc7e5 SIZE (uber-jaeger-client-go-v2.27.0_GH0.tar.gz) = 210139 SHA256 (uber-jaeger-lib-v2.4.1_GH0.tar.gz) = c178bcad325857dba29551c16f40707701adf6e3a9e01e1ca3e5edfc3c6de8bc SIZE (uber-jaeger-lib-v2.4.1_GH0.tar.gz) = 38010 -SHA256 (gitlab-org-gitlab-foss-ef2df7d848f4b13324ed2202fc17b983f34b154d_GL0.tar.gz) = a17424d58047178e5b6cb3cd87aa0e5203d6c900872466467598b935575758c3 -SIZE (gitlab-org-gitlab-foss-ef2df7d848f4b13324ed2202fc17b983f34b154d_GL0.tar.gz) = 102525535 +SHA256 (gitlab-org-gitlab-foss-5b713e1e9a309d1afff4c7068d1e03fbd6a5f7f7_GL0.tar.gz) = 9685e3cb51f4fc0b22ce5a9dc0bf25cc925c269c1346fd45693a3377756cc166 +SIZE 
(gitlab-org-gitlab-foss-5b713e1e9a309d1afff4c7068d1e03fbd6a5f7f7_GL0.tar.gz) = 102356177 SHA256 (gitlab-org-gitaly-df7dadcc3f74276a7176234d4b1475299f46c05c_GL0.tar.gz) = 4c403ee52c1d42d54e9acd14026796782e8272e74c8eb7c3cedf9c924697647e SIZE (gitlab-org-gitaly-df7dadcc3f74276a7176234d4b1475299f46c05c_GL0.tar.gz) = 3703056 SHA256 (gitlab-org-golang-archive-zip-84007c99e03b62ee4dc62a94048d6e59552299d7_GL0.tar.gz) = 103a3e806657de5e2c7844326146c88490089fa8d07784270c359b5f5a8d06be diff --git a/www/gnome-user-share/Makefile b/www/gnome-user-share/Makefile index c311ce9c227..cf76b6123a7 100644 --- a/www/gnome-user-share/Makefile +++ b/www/gnome-user-share/Makefile @@ -3,7 +3,7 @@ PORTNAME= gnome-user-share PORTVERSION= 3.14.0 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= www deskutils gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/www/gstreamer1-plugins-neon/Makefile b/www/gstreamer1-plugins-neon/Makefile index a83c76edb3b..b24e8fa7074 100644 --- a/www/gstreamer1-plugins-neon/Makefile +++ b/www/gstreamer1-plugins-neon/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= www COMMENT= GStreamer neon http source plugin diff --git a/www/hiawatha/Makefile b/www/hiawatha/Makefile index 4b006272f1b..ad2b370242e 100644 --- a/www/hiawatha/Makefile +++ b/www/hiawatha/Makefile @@ -2,7 +2,7 @@ PORTNAME= hiawatha PORTVERSION= 10.12 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= www MASTER_SITES= https://www.hiawatha-webserver.org/files/ diff --git a/www/kannel-sqlbox/Makefile b/www/kannel-sqlbox/Makefile index 93c8ddd3c10..b0632d1c6b4 100644 --- a/www/kannel-sqlbox/Makefile +++ b/www/kannel-sqlbox/Makefile @@ -1,6 +1,6 @@ PORTNAME= kannel-sqlbox PORTVERSION= 1.0.1 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= www MAINTAINER= dbaio@FreeBSD.org diff --git a/www/kannel/Makefile b/www/kannel/Makefile index 8f73f7bf71b..ce067deebe1 100644 --- a/www/kannel/Makefile +++ b/www/kannel/Makefile @@ -2,7 +2,7 @@ PORTNAME= kannel PORTVERSION= 1.4.4 -PORTREVISION= 9 
+PORTREVISION= 10 PORTEPOCH= 1 CATEGORIES= www MASTER_SITES= http://www.kannel.org/download/${PORTVERSION}/ diff --git a/www/midori/Makefile b/www/midori/Makefile index 8af0e20edf7..4f241ac3045 100644 --- a/www/midori/Makefile +++ b/www/midori/Makefile @@ -3,7 +3,7 @@ PORTNAME= midori PORTVERSION= 9.0 DISTVERSIONPREFIX= v -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= www xfce MAINTAINER= xfce@FreeBSD.org diff --git a/www/mod_authnz_crowd/Makefile b/www/mod_authnz_crowd/Makefile index ffcef1c1903..0c418bc2f6b 100644 --- a/www/mod_authnz_crowd/Makefile +++ b/www/mod_authnz_crowd/Makefile @@ -2,7 +2,7 @@ PORTNAME= mod_authnz_crowd PORTVERSION= 2.2.2 -PORTREVISION= 11 +PORTREVISION= 12 CATEGORIES= www MASTER_SITES= http://downloads.atlassian.com/software/crowd/downloads/cwdapache/ DIST_SUBDIR= apache2 diff --git a/www/mod_php74/Makefile b/www/mod_php74/Makefile index 1c81ca2d3c5..ca7ece3207e 100644 --- a/www/mod_php74/Makefile +++ b/www/mod_php74/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= www devel PKGNAMEPREFIX= mod_ diff --git a/www/mod_php80/Makefile b/www/mod_php80/Makefile index cb7d4ce11e3..3d324fb2bf0 100644 --- a/www/mod_php80/Makefile +++ b/www/mod_php80/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= www devel PKGNAMEPREFIX= mod_ diff --git a/www/mod_php81/Makefile b/www/mod_php81/Makefile index 5cf31b881fe..12061ff0ffb 100644 --- a/www/mod_php81/Makefile +++ b/www/mod_php81/Makefile @@ -1,3 +1,4 @@ +PORTREVISION= 1 CATEGORIES= www devel PKGNAMEPREFIX= mod_ diff --git a/www/mod_proxy_xml/Makefile b/www/mod_proxy_xml/Makefile index d045e58070f..f6cf0035290 100644 --- a/www/mod_proxy_xml/Makefile +++ b/www/mod_proxy_xml/Makefile @@ -2,7 +2,7 @@ PORTNAME= mod_proxy_xml PORTVERSION= 0.1 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= www MASTER_SITES= http://apache.webthing.com/svn/apache/filters/xmlns/ PKGNAMEPREFIX= ${APACHE_PKGNAMEPREFIX} diff --git a/www/mod_xmlns/Makefile b/www/mod_xmlns/Makefile index a931578daea..48ab78f737e 100644 --- 
a/www/mod_xmlns/Makefile +++ b/www/mod_xmlns/Makefile @@ -2,7 +2,7 @@ PORTNAME= mod_xmlns PORTVERSION= 0.97 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= www MASTER_SITES= http://apache.webthing.com/svn/apache/filters/xmlns/ PKGNAMEPREFIX= ${APACHE_PKGNAMEPREFIX} diff --git a/www/newsboat/Makefile b/www/newsboat/Makefile index cc1a3f762ed..83c01fe8f36 100644 --- a/www/newsboat/Makefile +++ b/www/newsboat/Makefile @@ -2,7 +2,7 @@ PORTNAME= newsboat DISTVERSION= 2.25 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= www MASTER_SITES= https://newsboat.org/releases/${DISTVERSION}/ DISTFILES= ${DISTNAME}${EXTRACT_SUFX} diff --git a/www/nghttp2/Makefile b/www/nghttp2/Makefile index 9b2c9712685..80be74fd2e8 100644 --- a/www/nghttp2/Makefile +++ b/www/nghttp2/Makefile @@ -2,6 +2,7 @@ PORTNAME= nghttp2 PORTVERSION= 1.46.0 +PORTREVISION= 1 CATEGORIES= www net MASTER_SITES= https://github.com/nghttp2/nghttp2/releases/download/v${PORTVERSION}/ diff --git a/www/nginx-full/Makefile b/www/nginx-full/Makefile index dffa7a85457..f43248d4d94 100644 --- a/www/nginx-full/Makefile +++ b/www/nginx-full/Makefile @@ -1,7 +1,7 @@ # Created by: Bartek Rutkowski PORTNAME= nginx -PORTREVISION= 7 +PORTREVISION= 8 PKGNAMESUFFIX= -full MAINTAINER= joneum@FreeBSD.org diff --git a/www/onlyoffice-documentserver/Makefile b/www/onlyoffice-documentserver/Makefile index 2b93a2b4884..164bb29a058 100644 --- a/www/onlyoffice-documentserver/Makefile +++ b/www/onlyoffice-documentserver/Makefile @@ -36,7 +36,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}supervisor>0:sysutils/py-supervisor@${PY_FLA rabbitmq>0:net/rabbitmq \ webfonts>=0:x11-fonts/webfonts -USES= autoreconf:build dos2unix fakeroot gmake gnome iconv nodejs:16,build pkgconfig \ +USES= autoreconf:build dos2unix fakeroot gmake gnome iconv localbase nodejs:16,build pkgconfig \ python:3.7+,build qt:5 USE_QT= qmake_build USE_GITHUB= yes diff --git a/www/p5-CGI-Compile/Makefile b/www/p5-CGI-Compile/Makefile index d35273c8d3c..4e64ebdf0cd 100644 --- 
a/www/p5-CGI-Compile/Makefile +++ b/www/p5-CGI-Compile/Makefile @@ -1,5 +1,5 @@ PORTNAME= CGI-Compile -PORTVERSION= 0.19 +PORTVERSION= 0.25 CATEGORIES= www perl5 MASTER_SITES= CPAN MASTER_SITE_SUBDIR= CPAN:MIYAGAWA diff --git a/www/p5-CGI-Compile/distinfo b/www/p5-CGI-Compile/distinfo index 422a1ab96be..b90b6177657 100644 --- a/www/p5-CGI-Compile/distinfo +++ b/www/p5-CGI-Compile/distinfo @@ -1,2 +1,3 @@ -SHA256 (CGI-Compile-0.19.tar.gz) = 288c40ec5ebe0a182163f370c20b3645a1dcbd4b0414aa4ead6140c53107543f -SIZE (CGI-Compile-0.19.tar.gz) = 19246 +TIMESTAMP = 1648190811 +SHA256 (CGI-Compile-0.25.tar.gz) = f44b74eedfbd1eb8f0f9688f9ddae15422e5fa48ae78be21b0afcb9e32490ea5 +SIZE (CGI-Compile-0.25.tar.gz) = 22479 diff --git a/www/p5-Dancer-Session-Cookie/Makefile b/www/p5-Dancer-Session-Cookie/Makefile index 9c3d1e337d3..2ceb62fb5e2 100644 --- a/www/p5-Dancer-Session-Cookie/Makefile +++ b/www/p5-Dancer-Session-Cookie/Makefile @@ -1,5 +1,5 @@ PORTNAME= Dancer-Session-Cookie -PORTVERSION= 0.27 +PORTVERSION= 0.30 CATEGORIES= www perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/www/p5-Dancer-Session-Cookie/distinfo b/www/p5-Dancer-Session-Cookie/distinfo index d302ae318bc..f5615f95948 100644 --- a/www/p5-Dancer-Session-Cookie/distinfo +++ b/www/p5-Dancer-Session-Cookie/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1482612706 -SHA256 (Dancer-Session-Cookie-0.27.tar.gz) = 862759fffa19621a62cf9989e4ede84df9c2c8e83ac731b877a202cce304b459 -SIZE (Dancer-Session-Cookie-0.27.tar.gz) = 23845 +TIMESTAMP = 1648197485 +SHA256 (Dancer-Session-Cookie-0.30.tar.gz) = 526790cf91b8d2501a58292284bf511a2f00b05f51aa0aff713ff5166b4d6412 +SIZE (Dancer-Session-Cookie-0.30.tar.gz) = 27033 diff --git a/www/p5-Dancer2/Makefile b/www/p5-Dancer2/Makefile index 5532e60e104..5c7c291f19d 100644 --- a/www/p5-Dancer2/Makefile +++ b/www/p5-Dancer2/Makefile @@ -1,7 +1,7 @@ # Created by: Wen Heping PORTNAME= Dancer2 -PORTVERSION= 0.301004 +PORTVERSION= 0.400000 CATEGORIES= www perl5 MASTER_SITES= CPAN 
PKGNAMEPREFIX= p5- diff --git a/www/p5-Dancer2/distinfo b/www/p5-Dancer2/distinfo index 20377d823d2..d4068bd2196 100644 --- a/www/p5-Dancer2/distinfo +++ b/www/p5-Dancer2/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1623291441 -SHA256 (Dancer2-0.301004.tar.gz) = 08ee0d9efbcc1a1a2059da5a1b99041eca8acee33b14e3d0b4cbbd010009dde5 -SIZE (Dancer2-0.301004.tar.gz) = 400506 +TIMESTAMP = 1648192217 +SHA256 (Dancer2-0.400000.tar.gz) = e6cf7a5eed29261087d2ce6db631d28b4e870cb14f31c2dfa2f77c0163bc435c +SIZE (Dancer2-0.400000.tar.gz) = 402506 diff --git a/www/p5-Dancer2/pkg-plist b/www/p5-Dancer2/pkg-plist index 27659749569..195fba20adb 100644 --- a/www/p5-Dancer2/pkg-plist +++ b/www/p5-Dancer2/pkg-plist @@ -36,6 +36,7 @@ bin/dancer2 %%SITE_PERL%%/Dancer2/Core/Session.pm %%SITE_PERL%%/Dancer2/Core/Time.pm %%SITE_PERL%%/Dancer2/Core/Types.pm +%%SITE_PERL%%/Dancer2/DeprecationPolicy.pod %%SITE_PERL%%/Dancer2/FileUtils.pm %%SITE_PERL%%/Dancer2/Handler/AutoPage.pm %%SITE_PERL%%/Dancer2/Handler/File.pm @@ -128,6 +129,7 @@ bin/dancer2 %%PERL5_MAN3%%/Dancer2::Core::Session.3.gz %%PERL5_MAN3%%/Dancer2::Core::Time.3.gz %%PERL5_MAN3%%/Dancer2::Core::Types.3.gz +%%PERL5_MAN3%%/Dancer2::DeprecationPolicy.3.gz %%PERL5_MAN3%%/Dancer2::FileUtils.3.gz %%PERL5_MAN3%%/Dancer2::Handler::AutoPage.3.gz %%PERL5_MAN3%%/Dancer2::Handler::File.3.gz diff --git a/www/p5-HTTP-Exception/Makefile b/www/p5-HTTP-Exception/Makefile index 04f06605694..2d40859008a 100644 --- a/www/p5-HTTP-Exception/Makefile +++ b/www/p5-HTTP-Exception/Makefile @@ -1,6 +1,5 @@ PORTNAME= HTTP-Exception -PORTVERSION= 0.04006 -PORTREVISION= 2 +PORTVERSION= 0.04007 CATEGORIES= www perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/www/p5-HTTP-Exception/distinfo b/www/p5-HTTP-Exception/distinfo index 8ef33e16372..7e03b7415a7 100644 --- a/www/p5-HTTP-Exception/distinfo +++ b/www/p5-HTTP-Exception/distinfo @@ -1,2 +1,3 @@ -SHA256 (HTTP-Exception-0.04006.tar.gz) = 6278de96ef9158927e07e21205875916a91d9c9e27c1cbd16444b98352b292d3 -SIZE 
(HTTP-Exception-0.04006.tar.gz) = 25023 +TIMESTAMP = 1648199724 +SHA256 (HTTP-Exception-0.04007.tar.gz) = fc56a2f2455fef9697b9797a7584c5097d6fc686e65f54a3a5f3fed34aea7443 +SIZE (HTTP-Exception-0.04007.tar.gz) = 26198 diff --git a/www/p5-HTTP-Session2/Makefile b/www/p5-HTTP-Session2/Makefile index ac1ff2279a8..cffedc63265 100644 --- a/www/p5-HTTP-Session2/Makefile +++ b/www/p5-HTTP-Session2/Makefile @@ -1,6 +1,5 @@ PORTNAME= HTTP-Session2 -PORTVERSION= 1.09 -PORTREVISION= 1 +PORTVERSION= 1.10 CATEGORIES= www perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- @@ -20,7 +19,7 @@ RUN_DEPENDS= \ BUILD_DEPENDS= ${RUN_DEPENDS} USES= perl5 -USE_PERL5= modbuild +USE_PERL5= modbuildtiny NO_ARCH= YES .include diff --git a/www/p5-HTTP-Session2/distinfo b/www/p5-HTTP-Session2/distinfo index c507a3fd1b0..313675fc461 100644 --- a/www/p5-HTTP-Session2/distinfo +++ b/www/p5-HTTP-Session2/distinfo @@ -1,2 +1,3 @@ -SHA256 (HTTP-Session2-1.09.tar.gz) = 585ae6a700b145f6a8359c60ee6e421075b63504ad06eea2e14d715c60fe57e4 -SIZE (HTTP-Session2-1.09.tar.gz) = 21500 +TIMESTAMP = 1648198873 +SHA256 (HTTP-Session2-1.10.tar.gz) = 03d1f3c770ff996e36f9204510abe092d1f8cf62c5f97d716cc2efb3ff970793 +SIZE (HTTP-Session2-1.10.tar.gz) = 21205 diff --git a/www/p5-HTTP-Session2/pkg-plist b/www/p5-HTTP-Session2/pkg-plist index 74851279ed1..1b76d19b573 100644 --- a/www/p5-HTTP-Session2/pkg-plist +++ b/www/p5-HTTP-Session2/pkg-plist @@ -2,6 +2,8 @@ %%PERL5_MAN3%%/HTTP::Session2::Base.3.gz %%PERL5_MAN3%%/HTTP::Session2::ClientStore.3.gz %%PERL5_MAN3%%/HTTP::Session2::ClientStore2.3.gz +%%PERL5_MAN3%%/HTTP::Session2::Expired.3.gz +%%PERL5_MAN3%%/HTTP::Session2::Random.3.gz %%PERL5_MAN3%%/HTTP::Session2::ServerStore.3.gz %%SITE_PERL%%/HTTP/Session2.pm %%SITE_PERL%%/HTTP/Session2/Base.pm diff --git a/www/p5-Session-Storage-Secure/Makefile b/www/p5-Session-Storage-Secure/Makefile index 7f623fc178b..390d3a28083 100644 --- a/www/p5-Session-Storage-Secure/Makefile +++ b/www/p5-Session-Storage-Secure/Makefile @@ -1,6 +1,5 @@ 
PORTNAME= Session-Storage-Secure -PORTVERSION= 0.010 -PORTREVISION= 1 +PORTVERSION= 1.000 CATEGORIES= www perl5 MASTER_SITES= CPAN MASTER_SITE_SUBDIR= CPAN:DAGOLDEN @@ -12,7 +11,7 @@ COMMENT= Perl extension for encrypted, serialized session data with integrity LICENSE= APACHE20 RUN_DEPENDS= \ - p5-Crypt-CBC>0:security/p5-Crypt-CBC \ + p5-Crypt-CBC>3.01:security/p5-Crypt-CBC \ p5-Crypt-Rijndael>0:security/p5-Crypt-Rijndael \ p5-Crypt-URandom>0:security/p5-Crypt-URandom \ p5-Math-Random-ISAAC-XS>0:math/p5-Math-Random-ISAAC-XS \ diff --git a/www/p5-Session-Storage-Secure/distinfo b/www/p5-Session-Storage-Secure/distinfo index 4bdd9c4ac9b..c3e2eb83f6d 100644 --- a/www/p5-Session-Storage-Secure/distinfo +++ b/www/p5-Session-Storage-Secure/distinfo @@ -1,2 +1,3 @@ -SHA256 (Session-Storage-Secure-0.010.tar.gz) = 3ef731772f7871bf4d5fe87387395c4b994345fc6268134b28978f82dde1c11f -SIZE (Session-Storage-Secure-0.010.tar.gz) = 24990 +TIMESTAMP = 1648200085 +SHA256 (Session-Storage-Secure-1.000.tar.gz) = 58b2f04eba56a9924494bb3eddd426affa75c3dded1c563f8296fcc13f8e666b +SIZE (Session-Storage-Secure-1.000.tar.gz) = 26371 diff --git a/www/pecl-solr/Makefile b/www/pecl-solr/Makefile index 6ea835535e3..0318318daf2 100644 --- a/www/pecl-solr/Makefile +++ b/www/pecl-solr/Makefile @@ -2,6 +2,7 @@ PORTNAME= solr PORTVERSION= 2.5.1 +PORTREVISION= 1 CATEGORIES= www MAINTAINER= gaod@hychen.org diff --git a/www/py-WebError/files/patch-2to3 b/www/py-WebError/files/patch-2to3 new file mode 100644 index 00000000000..0ac9ce46f1d --- /dev/null +++ b/www/py-WebError/files/patch-2to3 @@ -0,0 +1,319 @@ +--- weberror/evalcontext.py.orig 2016-04-10 01:43:23 UTC ++++ weberror/evalcontext.py +@@ -1,4 +1,4 @@ +-from cStringIO import StringIO ++from io import StringIO + import traceback + import threading + import pdb +@@ -32,7 +32,7 @@ class EvalContext(object): + sys.stdout = out + try: + code = compile(s, '', "single", 0, 1) +- exec code in self.namespace, self.globs ++ exec(code, self.namespace, 
self.globs) + debugger.set_continue() + except KeyboardInterrupt: + raise +--- weberror/evalexception.py.orig 2016-04-10 01:43:23 UTC ++++ weberror/evalexception.py +@@ -22,18 +22,18 @@ to see the full debuggable traceback. Also, this URL + ``wsgi.errors``, so you can open it up in another browser window. + + """ +-import httplib ++import http.client + import sys + import os + import cgi + import traceback +-from cStringIO import StringIO ++from io import StringIO + import pprint + import itertools + import time + import re + import types +-import urllib ++import urllib.request, urllib.parse, urllib.error + + from pkg_resources import resource_filename + +@@ -43,7 +43,7 @@ from paste import request + from paste import urlparser + from paste.util import import_string + +-import evalcontext ++from . import evalcontext + from weberror import errormiddleware, formatter, collector + from weberror.util import security + from tempita import HTMLTemplate +@@ -124,7 +124,7 @@ def wsgiapp(): + form['environ'] = environ + try: + res = func(*args, **form.mixed()) +- except ValueError, ve: ++ except ValueError as ve: + status = '500 Server Error' + res = 'There was an error: %s' % \ + html_quote(ve) +@@ -150,7 +150,7 @@ def get_debug_info(func): + debugcount = req.params['debugcount'] + try: + debugcount = int(debugcount) +- except ValueError, e: ++ except ValueError as e: + return exc.HTTPBadRequest( + "Invalid value for debugcount (%r): %s" + % (debugcount, e)) +@@ -197,7 +197,7 @@ def get_debug_count(req): + elif 'weberror.evalexception.debug_count' in environ: + return environ['weberror.evalexception.debug_count'] + else: +- next = debug_counter.next() ++ next = next(debug_counter) + environ['weberror.evalexception.debug_count'] = next + environ['paste.evalexception.debug_count'] = next + return next +@@ -279,7 +279,7 @@ class EvalException(object): + libraries=self.libraries)[0] + host = req.GET['host'] + headers = req.headers +- conn = httplib.HTTPConnection(host) ++ conn 
= http.client.HTTPConnection(host) + headers = {'Content-Length':len(long_xml_er), + 'Content-Type':'application/xml'} + conn.request("POST", req.GET['path'], long_xml_er, headers=headers) +@@ -311,7 +311,7 @@ class EvalException(object): + """ + res = Response(content_type='text/x-json') + data = []; +- items = self.debug_infos.values() ++ items = list(self.debug_infos.values()) + items.sort(lambda a, b: cmp(a.created, b.created)) + data = [item.json() for item in items] + res.body = repr(data) +@@ -525,7 +525,7 @@ class DebugInfo(object): + if id(frame) == tbid: + return frame + else: +- raise ValueError, ( ++ raise ValueError( + "No frame by id %s found from %r" % (tbid, self.frames)) + + def wsgi_application(self, environ, start_response): +@@ -601,7 +601,7 @@ class EvalHTMLFormatter(formatter.HTMLFormatter): + + def make_table(items): + if hasattr(items, 'items'): +- items = items.items() ++ items = list(items.items()) + items.sort() + return table_template.substitute( + html_quote=html_quote, +@@ -641,7 +641,7 @@ def pprint_format(value, safe=False): + out = StringIO() + try: + pprint.pprint(value, out) +- except Exception, e: ++ except Exception as e: + if safe: + out.write('Error: %s' % e) + else: +@@ -781,12 +781,12 @@ def make_eval_exception(app, global_conf, xmlhttp_key= + xmlhttp_key = global_conf.get('xmlhttp_key', '_') + if reporters is None: + reporters = global_conf.get('error_reporters') +- if reporters and isinstance(reporters, basestring): ++ if reporters and isinstance(reporters, str): + reporter_strings = reporters.split() + reporters = [] + for reporter_string in reporter_strings: + reporter = import_string.eval_import(reporter_string) +- if isinstance(reporter, (type, types.ClassType)): ++ if isinstance(reporter, type): + reporter = reporter() + reporters.append(reporter) + return EvalException(app, xmlhttp_key=xmlhttp_key, reporters=reporters) +--- weberror/formatter.py.orig 2016-04-10 01:43:23 UTC ++++ weberror/formatter.py +@@ -63,7 +63,7 
@@ class AbstractFormatter(object): + def format_collected_data(self, exc_data): + general_data = {} + if self.show_extra_data: +- for name, value_list in exc_data.extra_data.items(): ++ for name, value_list in list(exc_data.extra_data.items()): + if isinstance(name, tuple): + importance, title = name + else: +@@ -116,17 +116,17 @@ class AbstractFormatter(object): + if res: + lines.append(res) + etype = exc_data.exception_type +- if not isinstance(etype, basestring): ++ if not isinstance(etype, str): + etype = etype.__name__ + exc_info = self.format_exception_info( + etype, + exc_data.exception_value) + data_by_importance = {'important': [], 'normal': [], + 'supplemental': [], 'extra': []} +- for (importance, name), value in general_data.items(): ++ for (importance, name), value in list(general_data.items()): + data_by_importance[importance].append( + (name, value)) +- for value in data_by_importance.values(): ++ for value in list(data_by_importance.values()): + value.sort() + return self.format_combine(data_by_importance, lines, exc_info) + +@@ -269,12 +269,12 @@ class TextFormatter(AbstractFormatter): + return '%s: %s' % (title, s) + elif isinstance(value, dict): + lines = ['\n', title, '-'*len(title)] +- items = value.items() ++ items = list(value.items()) + items.sort() + for n, v in items: + try: + v = repr(v) +- except Exception, e: ++ except Exception as e: + v = 'Cannot display: %s' % e + v = truncate(v) + lines.append(' %s: %s' % (n, v)) +@@ -346,7 +346,7 @@ class HTMLFormatter(TextFormatter): + elif (isinstance(value, (list, tuple)) + and self.long_item_list(value)): + return '%s: [
\n    %s]
' % ( +- title, ',
    '.join(map(self.quote, map(repr, value)))) ++ title, ',
    '.join(map(self.quote, list(map(repr, value))))) + else: + return '%s: %s' % (title, self.quote(repr(value))) + +@@ -370,7 +370,7 @@ class HTMLFormatter(TextFormatter): + + def zebra_table(self, title, rows, table_class="variables"): + if isinstance(rows, dict): +- rows = rows.items() ++ rows = list(rows.items()) + rows.sort() + table = ['' % table_class, + '' +@@ -379,7 +379,7 @@ class HTMLFormatter(TextFormatter): + for name, value in rows: + try: + value = repr(value) +- except Exception, e: ++ except Exception as e: + value = 'Cannot print: %s' % e + odd = not odd + table.append( +@@ -423,7 +423,7 @@ def get_libraries(libs=None): + return {} + + def create_text_node(doc, elem, text): +- if not isinstance(text, basestring): ++ if not isinstance(text, str): + try: + text = escaping.removeIllegalChars(repr(text)) + except: +@@ -449,7 +449,7 @@ class XMLFormatter(AbstractFormatter): + libs = get_libraries(self.extra_kwargs.get('libraries')) + if libs: + libraries = newdoc.createElement('libraries') +- for k, v in libs.iteritems(): ++ for k, v in libs.items(): + lib = newdoc.createElement('library') + lib.attributes['version'] = v + lib.attributes['name'] = k +@@ -493,7 +493,7 @@ class XMLFormatter(AbstractFormatter): + # variables.appendChild(variable) + + etype = exc_data.exception_type +- if not isinstance(etype, basestring): ++ if not isinstance(etype, str): + etype = etype.__name__ + + top_element.appendChild(self.format_exception_info( +@@ -677,6 +677,6 @@ def make_pre_wrappable(html, wrap_limit=60, + return '\n'.join(lines) + + def convert_to_str(s): +- if isinstance(s, unicode): ++ if isinstance(s, str): + return s.encode('utf8') + return s +--- weberror/pdbcapture.py.orig 2016-04-10 01:43:23 UTC ++++ weberror/pdbcapture.py +@@ -50,7 +50,7 @@ class PdbCapture(object): + return self.media_app(environ, start_response) + resp = self.internal_request(req) + return resp(environ, start_response) +- id = self.counter.next() ++ id = next(self.counter) + state = 
dict(id=id, + event=threading.Event(), + base_url=req.application_url, +@@ -66,7 +66,7 @@ class PdbCapture(object): + resp = state['response'] + return resp(environ, start_response) + if 'exc_info' in state: +- raise state['exc_info'][0], state['exc_info'][1], state['exc_info'][2] ++ raise state['exc_info'][0](state['exc_info'][1]).with_traceback(state['exc_info'][2]) + self.states[id] = state + tmpl = self.get_template('pdbcapture_response.html') + body = tmpl.substitute(req=req, state=state, id=id) +--- weberror/util/security.py.orig 2016-04-10 01:43:23 UTC ++++ weberror/util/security.py +@@ -21,7 +21,7 @@ else: + expected_len = len(expected) + result = actual_len ^ expected_len + if expected_len > 0: +- for i in xrange(actual_len): ++ for i in range(actual_len): + result |= ord(actual[i]) ^ ord(expected[i % expected_len]) + return result == 0 + +@@ -43,7 +43,7 @@ def valid_csrf_token(secret, token): + try: + expiry_ts, hashed = token.split(',') + expiry_dt = datetime.utcfromtimestamp(int(expiry_ts)) +- except ValueError, e: ++ except ValueError as e: + return False + + if expiry_dt < datetime.utcnow(): +--- weberror/util/serial_number_generator.py.orig 2016-04-10 01:43:23 UTC ++++ weberror/util/serial_number_generator.py +@@ -19,7 +19,7 @@ base = len(good_characters) + + def lazy_result(return_type, dummy_initial=None): + """Decorator to allow for on-demand evaluation (limited scope of use!)""" +- if not issubclass(return_type, basestring): ++ if not issubclass(return_type, str): + raise NotImplementedError + + class _lazy_class(return_type): +@@ -54,7 +54,7 @@ def make_identifier(number): + """ + Encodes a number as an identifier. 
+ """ +- if not isinstance(number, (int, long)): ++ if not isinstance(number, int): + raise ValueError( + "You can only make identifiers out of integers (not %r)" + % number) +@@ -90,10 +90,10 @@ def hash_identifier(s, length, pad=True, hasher=md5, p + # Accept sha/md5 modules as well as callables + hasher = hasher.new + if length > 26 and hasher is md5: +- raise ValueError, ( ++ raise ValueError( + "md5 cannot create hashes longer than 26 characters in " + "length (you gave %s)" % length) +- if isinstance(s, unicode): ++ if isinstance(s, str): + s = s.encode('utf-8') + h = hasher(str(s)) + bin_hash = h.digest() diff --git a/www/py-aioh2/Makefile b/www/py-aioh2/Makefile index dcf1ce40304..23c495a4da4 100644 --- a/www/py-aioh2/Makefile +++ b/www/py-aioh2/Makefile @@ -12,7 +12,7 @@ COMMENT= HTTP/2 implementation with hyper-h2 on Python 3 asyncio LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}h2>=3:www/py-h2@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}h2>=3:www/py-h2@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}priority>=1.3.0:www/py-priority@${PY_FLAVOR} USES= python:3.4+ diff --git a/www/py-aioh2/files/patch-aioh2-helper.py b/www/py-aioh2/files/patch-aioh2-helper.py new file mode 100644 index 00000000000..1df8af12a2a --- /dev/null +++ b/www/py-aioh2/files/patch-aioh2-helper.py @@ -0,0 +1,8 @@ +--- aioh2/helper.py.orig 2018-02-05 02:31:19 UTC ++++ aioh2/helper.py +@@ -86,4 +86,4 @@ if hasattr(socket, 'AF_UNIX'): + if hasattr(asyncio, 'ensure_future'): # Python >= 3.5 + async_task = asyncio.ensure_future + else: +- async_task = asyncio.async ++ async_task = getattr(asyncio, "async") diff --git a/www/py-aioh2/files/patch-aioh2-protocol.py b/www/py-aioh2/files/patch-aioh2-protocol.py new file mode 100644 index 00000000000..1bad65b74c6 --- /dev/null +++ b/www/py-aioh2/files/patch-aioh2-protocol.py @@ -0,0 +1,11 @@ +--- aioh2/protocol.py.orig 2017-12-03 09:08:55 UTC ++++ aioh2/protocol.py +@@ -380,7 +380,7 @@ class
H2Protocol(asyncio.Protocol): + if self._handler: + raise Exception('Handler was already set') + if handler: +- self._handler = asyncio.async(handler, loop=self._loop) ++ self._handler = async_task(handler, loop=self._loop) + + def close_connection(self): + self._transport.close() diff --git a/www/py-bleach/Makefile b/www/py-bleach/Makefile index 7eef3bb0a04..9b71a380e0c 100644 --- a/www/py-bleach/Makefile +++ b/www/py-bleach/Makefile @@ -1,7 +1,7 @@ # Created by: Kubilay Kocak PORTNAME= bleach -PORTVERSION= 3.1.5 +PORTVERSION= 4.1.0 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -10,26 +10,22 @@ MAINTAINER= sunpoet@FreeBSD.org COMMENT= Easy safelist-based HTML-sanitizing tool LICENSE= APACHE20 -LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}six>=1.9.0:devel/py-six@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}webencodings>0:converters/py-webencodings@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}packaging>0:devel/py-packaging@${PY_FLAVOR} -TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=3.0.0:devel/py-pytest@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}packaging>=0:devel/py-packaging@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}six>=1.9.0:devel/py-six@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}webencodings>=0:converters/py-webencodings@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>=0,1:devel/py-pytest@${PY_FLAVOR} -USES= cpe python:3.6+ -USE_PYTHON= autoplist distutils - -CPE_VENDOR= mozilla +USES= cpe python:3.6+ shebangfix +USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes -# This is a development script -# TODO: Remove from sdist (via MANIFEST.ini) -post-extract: - ${RM} ${WRKSRC}/bleach/_vendor/pip_install_vendor.sh +CPE_VENDOR= mozilla + +SHEBANG_FILES= bleach/_vendor/vendor_install.sh do-test: - @cd ${WRKSRC} && ${PYTHON_CMD} -m pytest -v -rs -o addopts= + cd ${WRKSRC} && ${PYTHON_CMD} -m pytest -rs -v .include diff --git a/www/py-bleach/distinfo b/www/py-bleach/distinfo index
9bef3591ecd..6ad3db7f84d 100644 --- a/www/py-bleach/distinfo +++ b/www/py-bleach/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1588872176 -SHA256 (bleach-3.1.5.tar.gz) = 3c4c520fdb9db59ef139915a5db79f8b51bc2a7257ea0389f30c846883430a4b -SIZE (bleach-3.1.5.tar.gz) = 177972 +TIMESTAMP = 1647264688 +SHA256 (bleach-4.1.0.tar.gz) = 0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da +SIZE (bleach-4.1.0.tar.gz) = 195798 diff --git a/www/py-bleach/pkg-descr b/www/py-bleach/pkg-descr index 2b4180e181f..cd710a3f814 100644 --- a/www/py-bleach/pkg-descr +++ b/www/py-bleach/pkg-descr @@ -1,5 +1,17 @@ -Bleach is a Python HTML sanitizing library that escapes or strips markup and -attributes based on a white list. Bleach is intended for sanitizing text from -untrusted sources. +Bleach is an allowed-list-based HTML sanitizing library that escapes or strips +markup and attributes. + +Bleach can also linkify text safely, applying filters that Django's urlize +filter cannot, and optionally setting rel attributes, even on links already in +the text. + +Bleach is intended for sanitizing text from untrusted sources. If you find +yourself jumping through hoops to allow your site administrators to do lots of +things, you're probably outside the use cases. Either trust those users, or +don't. + +Because it relies on html5lib, Bleach is as good as modern browsers at dealing +with weird, quirky HTML fragments. And any of Bleach's methods will fix +unbalanced or mis-nested tags. 
WWW: https://github.com/mozilla/bleach diff --git a/www/py-dj32-django-modelcluster/Makefile b/www/py-dj32-django-modelcluster/Makefile index 40cf3e59b1f..d3745336f95 100644 --- a/www/py-dj32-django-modelcluster/Makefile +++ b/www/py-dj32-django-modelcluster/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= django-modelcluster -PORTVERSION= 5.2 +PORTVERSION= 5.3 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX}dj32- diff --git a/www/py-dj32-django-modelcluster/distinfo b/www/py-dj32-django-modelcluster/distinfo index 28006fa0bf2..60fed430441 100644 --- a/www/py-dj32-django-modelcluster/distinfo +++ b/www/py-dj32-django-modelcluster/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634391162 -SHA256 (django-modelcluster-5.2.tar.gz) = e541a46a0a899ef4778a4708be22e71cac3efacc09a6ff44bc065c5c9194c054 -SIZE (django-modelcluster-5.2.tar.gz) = 24332 +TIMESTAMP = 1647264690 +SHA256 (django-modelcluster-5.3.tar.gz) = 0347cdcacb19a1078ee56cc3e6d5413ba27b8a5900710c53bb92b5d8ff3819cd +SIZE (django-modelcluster-5.3.tar.gz) = 25870 diff --git a/www/py-django-allauth/Makefile b/www/py-django-allauth/Makefile index 6ee5b31d066..11cf9f5aed1 100644 --- a/www/py-django-allauth/Makefile +++ b/www/py-django-allauth/Makefile @@ -1,10 +1,10 @@ PORTNAME= django-allauth -PORTVERSION= 0.48.0 +PORTVERSION= 0.49.0 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} -MAINTAINER= ports@FreeBSD.org +MAINTAINER= sunpoet@FreeBSD.org COMMENT= User registration with social account authentication LICENSE= MIT @@ -12,12 +12,12 @@ LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}django22>=2.0:www/py-django22@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pyjwt>=1.7:www/py-pyjwt@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}requests-oauthlib>=0.3.0:www/py-requests-oauthlib@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}python3-openid>=3.0.8:security/py-python3-openid@${PY_FLAVOR} \ 
${PYTHON_PKGNAMEPREFIX}requests>=0:www/py-requests@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}python3-openid>=3.0.8:security/py-python3-openid@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}requests-oauthlib>=0.3.0:www/py-requests-oauthlib@${PY_FLAVOR} USES= python:3.5+ -USE_PYTHON= autoplist distutils +USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/www/py-django-allauth/distinfo b/www/py-django-allauth/distinfo index 01783bc1508..f8164aee4a6 100644 --- a/www/py-django-allauth/distinfo +++ b/www/py-django-allauth/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645390379 -SHA256 (django-allauth-0.48.0.tar.gz) = 531821ce6a2278168054add13421776c9f8e565cf39926e799fa02d6c29da920 -SIZE (django-allauth-0.48.0.tar.gz) = 658354 +TIMESTAMP = 1647264692 +SHA256 (django-allauth-0.49.0.tar.gz) = f5fbb67376177c6a9276516dde98bcb01ac4160a5a27f7b340914dd521d04f12 +SIZE (django-allauth-0.49.0.tar.gz) = 702336 diff --git a/www/py-django-allauth/pkg-descr b/www/py-django-allauth/pkg-descr index 4d17e75998a..7d6322c755e 100644 --- a/www/py-django-allauth/pkg-descr +++ b/www/py-django-allauth/pkg-descr @@ -1,4 +1,5 @@ -django-allauth is a reusable Django app that allows for both local and -social authentication, with flows that just work. +django-allauth is a reusable Django app that allows for both local and social +authentication, with flows that just work. 
WWW: https://www.intenct.nl/projects/django-allauth/ +WWW: https://github.com/pennersr/django-allauth diff --git a/www/py-django-bootstrap-form/Makefile b/www/py-django-bootstrap-form/Makefile index 654b3509ae7..9e1ebc9f9ed 100644 --- a/www/py-django-bootstrap-form/Makefile +++ b/www/py-django-bootstrap-form/Makefile @@ -12,6 +12,9 @@ COMMENT= Bootstrap forms for django LICENSE= MIT +DEPRECATED= No longer maintained, not compatible with newer versions of Django +EXPIRATION_DATE= 2022-06-30 + USES= python:3.5+ USE_PYTHON= distutils autoplist diff --git a/www/py-django-hijack/Makefile b/www/py-django-hijack/Makefile index 5ae7530492f..48d6d9a293e 100644 --- a/www/py-django-hijack/Makefile +++ b/www/py-django-hijack/Makefile @@ -1,11 +1,13 @@ # Created by: Kevin Golding PORTNAME= django-hijack -PORTVERSION= 2.1.10 -PORTREVISION= 1 +PORTVERSION= 3.1.4 CATEGORIES= www python -MASTER_SITES= CHEESESHOP +MASTER_SITES= CHEESESHOP \ + https://ports.caomhin.org/:npmcache PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +DISTFILES= ${PORTNAME}-${PORTVERSION}${EXTRACT_SUFX} \ + ${PORTNAME}-npm-cache-${DISTVERSION}${EXTRACT_SUFX}:npmcache MAINTAINER= ports@caomhin.org COMMENT= Allows Django superusers to login as other users @@ -13,12 +15,42 @@ COMMENT= Allows Django superusers to login as other users LICENSE= MIT LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}django22>0:www/py-django22@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}django-compat>=1.0.14:www/py-django-compat@${PY_FLAVOR} +BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}setuptools_scm>0:devel/py-setuptools_scm@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pytest-runner>0:devel/py-pytest-runner@${PY_FLAVOR} \ + npm:www/npm +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}django22>=2.2:www/py-django22@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}pytest>0:devel/py-pytest@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pytest-django>0:devel/py-pytest-django@${PY_FLAVOR} -USES= python:3.5+ +USES= gettext-tools:build python:3.6+ USE_PYTHON= 
distutils autoplist +MAKE_ENV= npm_config_offline=true +TEST_ENV= DJANGO_SETTINGS_MODULES=hijack.tests.test_app.settings PYTHONPATH=${STAGEDIR}${PYTHONPREFIX_SITELIBDIR} + NO_ARCH= yes +_MY_NPMCACHE= ${WRKDIR}/.npm + +# Add remaining files (e.g. *.po) that aren't recorded by setuptools +# during install. +post-install: + @${FIND} ${STAGEDIR} -type f -o -type l | \ + ${SORT} | ${SED} -e 's|${STAGEDIR}||' \ + > ${WRKDIR}/.PLIST.pymodtmp + +do-test: + @cd ${WRKSRC} && ${SETENV} ${TEST_ENV} ${PYTHON_CMD} -m pytest -v -rs -o addopts= + +# Helper target to make the generation of the npm cache easier +make-npm-cache: extract + @${RM} -r ${_MY_NPMCACHE} + @cd ${WRKSRC}/ \ + && ${SETENV} HOME=${WRKDIR} ${LOCALBASE}/bin/npm ci + @cd ${_MY_NPMCACHE} \ + && ${RM} _locks anonymous-cli-metrics.json + @cd ${WRKDIR} \ + && ${TAR} -czf ${PORTNAME}-npm-cache-${DISTVERSION}${EXTRACT_SUFX} .npm \ + && ${ECHO_CMD} "Please upload the file ${WRKDIR}/${PORTNAME}-npm-cache-${DISTVERSION}${EXTRACT_SUFX}" + .include diff --git a/www/py-django-hijack/distinfo b/www/py-django-hijack/distinfo index b84e250500c..a5fca74d58e 100644 --- a/www/py-django-hijack/distinfo +++ b/www/py-django-hijack/distinfo @@ -1,3 +1,5 @@ -TIMESTAMP = 1549044642 -SHA256 (django-hijack-2.1.10.tar.gz) = be484f0ca67a092d5bf9bf8a5307beb716dc2e86b56a69796479183fdeb9036c -SIZE (django-hijack-2.1.10.tar.gz) = 20480 +TIMESTAMP = 1647700803 +SHA256 (django-hijack-3.1.4.tar.gz) = 785940c2e693401d8302fff4ced2d8cf0beb69a88b7f944539b035ab11b1b6d3 +SIZE (django-hijack-3.1.4.tar.gz) = 213799 +SHA256 (django-hijack-npm-cache-3.1.4.tar.gz) = 54252454e9ba0f93d101494392814027f29792344a9d06cc188f970b5e0e49c5 +SIZE (django-hijack-npm-cache-3.1.4.tar.gz) = 13227839 diff --git a/www/py-django-hijack/files/patch-setup.py b/www/py-django-hijack/files/patch-setup.py new file mode 100644 index 00000000000..7527154ec8b --- /dev/null +++ b/www/py-django-hijack/files/patch-setup.py @@ -0,0 +1,35 @@ +Handle the installation by native setuptools to 
prevent errors like +"command 'install' has no such option 'single_version_externally_managed'". + +--- setup.py.orig 2022-03-12 16:51:11 UTC ++++ setup.py +@@ -5,7 +5,6 @@ import os + import subprocess # nosec + from distutils.cmd import Command + from distutils.command.build import build as _build +-from distutils.command.install import install as _install + + from setuptools import setup + +@@ -62,21 +61,11 @@ class build(_build): + ("compile_scss", None), + ] + +- +-class install(_install): +- sub_commands = [ +- *_install.sub_commands, +- ("compile_translations", None), +- ("compile_scss", None), +- ] +- +- + setup( + name="django-hijack", + use_scm_version=True, + cmdclass={ + "build": build, +- "install": install, + "compile_translations": compile_translations, + "compile_scss": compile_scss, + }, diff --git a/www/py-django-modelcluster/Makefile b/www/py-django-modelcluster/Makefile index 7bfe18d5a33..b8b7662aa9c 100644 --- a/www/py-django-modelcluster/Makefile +++ b/www/py-django-modelcluster/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= django-modelcluster -PORTVERSION= 5.2 +PORTVERSION= 5.3 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-django-modelcluster/distinfo b/www/py-django-modelcluster/distinfo index 08e5cf3c2ba..ef69861e73a 100644 --- a/www/py-django-modelcluster/distinfo +++ b/www/py-django-modelcluster/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1634391166 -SHA256 (django-modelcluster-5.2.tar.gz) = e541a46a0a899ef4778a4708be22e71cac3efacc09a6ff44bc065c5c9194c054 -SIZE (django-modelcluster-5.2.tar.gz) = 24332 +TIMESTAMP = 1647264694 +SHA256 (django-modelcluster-5.3.tar.gz) = 0347cdcacb19a1078ee56cc3e6d5413ba27b8a5900710c53bb92b5d8ff3819cd +SIZE (django-modelcluster-5.3.tar.gz) = 25870 diff --git a/www/py-django-sudo/Makefile b/www/py-django-sudo/Makefile index a20b9c8b65e..1eb77272b88 100644 --- a/www/py-django-sudo/Makefile +++ b/www/py-django-sudo/Makefile @@ -12,6 +12,9 
@@ COMMENT= Sudo mode is an extra layer of security for Django LICENSE= BSD3CLAUSE +DEPRECATED= Abandoned upstream, not compatible with newer versions of Django +EXPIRATION_DATE= 2022-06-30 + RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}django22>=0:www/py-django22@${PY_FLAVOR} USES= python:3.5+ diff --git a/www/py-django/Makefile b/www/py-django/Makefile index edaee4227c6..0e042249dc9 100644 --- a/www/py-django/Makefile +++ b/www/py-django/Makefile @@ -10,6 +10,9 @@ COMMENT= High-level Python Web Framework (meta port) LICENSE= BSD3CLAUSE +DEPRECATED= Was used as a metaport, no longer required +EXPIRATION_DATE= 2022-06-30 + USES= metaport USE_PYTHON= flavors diff --git a/www/py-dtflickr/files/patch-2to3 b/www/py-dtflickr/files/patch-2to3 new file mode 100644 index 00000000000..eb1fc3f77ce --- /dev/null +++ b/www/py-dtflickr/files/patch-2to3 @@ -0,0 +1,103 @@ +--- dtflickr/__init__.py.orig 2009-09-21 01:59:19 UTC ++++ dtflickr/__init__.py +@@ -19,10 +19,10 @@ + # limitations under the License. + + import hashlib +-import _methods ++from . import _methods + import re + import time +-import urllib, urllib2 ++import urllib.request, urllib.parse, urllib.error, urllib.request, urllib.error, urllib.parse + + try: + import simplejson as json +@@ -50,7 +50,7 @@ for namespace, methods in _methods.namespaces: + for method, documentation in methods: + code += ' def ' + method + '(self, **arguments):\n ' + repr(documentation) + '\n return self._execute(\'' + method + '\', **arguments)\n' + +- exec code in globals(), locals() ++ exec(code, globals(), locals()) + + del namespace, methods, method, documentation, code + +@@ -92,19 +92,19 @@ class Flickr: + self.__signature = None + + for namespace, methods in _methods.namespaces: +- exec 'self.' + namespace + ' = ' + _methods.namespace(namespace) + '(self)' ++ exec('self.' 
+ namespace + ' = ' + _methods.namespace(namespace) + '(self)') + + self.__cache = {} + + def _execute(self, method, **arguments): +- for name, value in arguments.iteritems(): +- arguments[name] = unicode(value).encode('utf8') ++ for name, value in arguments.items(): ++ arguments[name] = str(value).encode('utf8') + + arguments['api_key'] = self.__api_key + arguments['format'] = 'json' + arguments['method'] = 'flickr.' + method + arguments['nojsoncallback'] = 1 +- parameters = arguments.items() ++ parameters = list(arguments.items()) + + parameters.sort() + +@@ -112,31 +112,31 @@ class Flickr: + signature = self.__signature.copy() + + for name, value in parameters: +- signature.update(name + unicode(value).encode('utf8')) ++ signature.update(name + str(value).encode('utf8')) + + parameters.append(('api_sig', signature.hexdigest())) + +- parameters = urllib.urlencode(parameters) ++ parameters = urllib.parse.urlencode(parameters) + cached = self.__cache.get(parameters) + + if cached is not None and cached[0] > time.time(): + response = cached[1] + now = time.time() + +- for parameters, cached in self.__cache.items(): ++ for parameters, cached in list(self.__cache.items()): + if cached[0] <= now: + del self.__cache[parameters] + + return response + +- response = json.load(urllib2.urlopen('http://api.flickr.com/services/rest/', parameters), object_hook = Response) ++ response = json.load(urllib.request.urlopen('http://api.flickr.com/services/rest/', parameters), object_hook = Response) + + if response.stat == 'ok': + self.__cache[parameters] = (time.time() + 60, response) + + return response + else: +- raise Failure, response ++ raise Failure(response) + + class Response: + def __init__(self, data): +@@ -158,7 +158,7 @@ class Response: + return self.__data[name] + + def __iter__(self): +- return self.__data.iteritems() ++ return iter(self.__data.items()) + + def __contains__(self, name): + return name in self.__data +@@ -230,7 +230,7 @@ def getBuddyiconURL(person, 
flickr = None): + flickr (Optional) + A Flickr API instance used to get a person response. + """ +- if isinstance(person, basestring): ++ if isinstance(person, str): + assert flickr is not None and isinstance(flickr, Flickr) + + person = flickr.people.getInfo(user_id = person).person diff --git a/www/py-flask-caching/Makefile b/www/py-flask-caching/Makefile new file mode 100644 index 00000000000..aa2fd7e11bc --- /dev/null +++ b/www/py-flask-caching/Makefile @@ -0,0 +1,21 @@ +# created by Rene Thuemmler + +PORTNAME= flask-caching +PORTVERSION= 1.9.0 +CATEGORIES= www python +MASTER_SITES= CHEESESHOP +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +DISTNAME= Flask-Caching-${PORTVERSION} + +MAINTAINER= rt@scientifics.de +COMMENT= Flask-Caching is an extension to Flask + +LICENSE= BSD3CLAUSE +LICENSE_FILE= ${WRKSRC}/LICENSE + +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}Flask>=0.8:www/py-flask@${PY_FLAVOR} + +USES= python:3.6+ +USE_PYTHON= autoplist distutils + +.include diff --git a/www/py-flask-caching/distinfo b/www/py-flask-caching/distinfo new file mode 100644 index 00000000000..42b410095d9 --- /dev/null +++ b/www/py-flask-caching/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1648414513 +SHA256 (Flask-Caching-1.9.0.tar.gz) = a0356ad868b1d8ec2d0e675a6fe891c41303128f8904d5d79e180d8b3f952aff +SIZE (Flask-Caching-1.9.0.tar.gz) = 71618 diff --git a/www/py-flask-caching/pkg-descr b/www/py-flask-caching/pkg-descr new file mode 100644 index 00000000000..ea108b1a9dd --- /dev/null +++ b/www/py-flask-caching/pkg-descr @@ -0,0 +1,5 @@ +Flask-Cache adds cache support to your Flask application. The cache +backend can Be chosen from simple, memcached, and filesystem. +Install databases/memcached from ports if needed. 
+ +WWW: https://pythonhosted.org/Flask-Cache/ diff --git a/www/py-google-api-core/Makefile b/www/py-google-api-core/Makefile index efc19320e09..241a9a76c6e 100644 --- a/www/py-google-api-core/Makefile +++ b/www/py-google-api-core/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-api-core -PORTVERSION= 2.5.0 +PORTVERSION= 2.7.1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-google-api-core/distinfo b/www/py-google-api-core/distinfo index 65639b19310..f23d6e182a0 100644 --- a/www/py-google-api-core/distinfo +++ b/www/py-google-api-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058154 -SHA256 (google-api-core-2.5.0.tar.gz) = f33863a6709651703b8b18b67093514838c79f2b04d02aa501203079f24b8018 -SIZE (google-api-core-2.5.0.tar.gz) = 118867 +TIMESTAMP = 1647264696 +SHA256 (google-api-core-2.7.1.tar.gz) = b0fa577e512f0c8e063386b974718b8614586a798c5894ed34bedf256d9dae24 +SIZE (google-api-core-2.7.1.tar.gz) = 122056 diff --git a/www/py-google-cloud-appengine-logging/Makefile b/www/py-google-cloud-appengine-logging/Makefile index 1d556b38350..86ab67a27df 100644 --- a/www/py-google-cloud-appengine-logging/Makefile +++ b/www/py-google-cloud-appengine-logging/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-cloud-appengine-logging -PORTVERSION= 1.1.0 +PORTVERSION= 1.1.1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,7 +12,7 @@ COMMENT= Python Client for Google Cloud Appengine Logging LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.22.2<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}packaging>=14.3:devel/py-packaging@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.15.0:devel/py-proto-plus@${PY_FLAVOR} diff --git 
a/www/py-google-cloud-appengine-logging/distinfo b/www/py-google-cloud-appengine-logging/distinfo index d28861981e1..f0f96ac5ad7 100644 --- a/www/py-google-cloud-appengine-logging/distinfo +++ b/www/py-google-cloud-appengine-logging/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1635731011 -SHA256 (google-cloud-appengine-logging-1.1.0.tar.gz) = 91fe9b0833f6e1a46293dcc0e483716372c9ff4a95ebe51276c5f0092cb9defd -SIZE (google-cloud-appengine-logging-1.1.0.tar.gz) = 11777 +TIMESTAMP = 1647264698 +SHA256 (google-cloud-appengine-logging-1.1.1.tar.gz) = 361450ed7d7bfd8efd0ef253e21680aeb6f6df3cf057e5e8253f5851f8cbbca7 +SIZE (google-cloud-appengine-logging-1.1.1.tar.gz) = 11847 diff --git a/www/py-google-cloud-bigtable/Makefile b/www/py-google-cloud-bigtable/Makefile index eea7825bce0..6e13441d536 100644 --- a/www/py-google-cloud-bigtable/Makefile +++ b/www/py-google-cloud-bigtable/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-cloud-bigtable -PORTVERSION= 2.5.2 +PORTVERSION= 2.7.0 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,10 +12,10 @@ COMMENT= Google Cloud Bigtable API client library LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.28.0<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}google-cloud-core>=1.4.1<3.0.0:www/py-google-cloud-core@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}grpc-google-iam-v1>=0.12.3<0.13:devel/py-grpc-google-iam-v1@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.13.0:devel/py-proto-plus@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.15.0:devel/py-proto-plus@${PY_FLAVOR} USES= python:3.6+ USE_PYTHON= autoplist concurrent distutils diff --git a/www/py-google-cloud-bigtable/distinfo b/www/py-google-cloud-bigtable/distinfo index 06ed8bd0b24..82f73b42808 100644 --- a/www/py-google-cloud-bigtable/distinfo 
+++ b/www/py-google-cloud-bigtable/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058156 -SHA256 (google-cloud-bigtable-2.5.2.tar.gz) = da8b7c7b7e056c9e75741c88a7edd3ab43a082929b8ea6408f0371519b677f42 -SIZE (google-cloud-bigtable-2.5.2.tar.gz) = 273589 +TIMESTAMP = 1647264700 +SHA256 (google-cloud-bigtable-2.7.0.tar.gz) = 4d4802ffa219f64344e620ed6e128f1c9aa8decbfd5be9e5eca731b977eb6f23 +SIZE (google-cloud-bigtable-2.7.0.tar.gz) = 278167 diff --git a/www/py-google-cloud-core/Makefile b/www/py-google-cloud-core/Makefile index 33d05b46e59..cd82b399213 100644 --- a/www/py-google-cloud-core/Makefile +++ b/www/py-google-cloud-core/Makefile @@ -1,5 +1,5 @@ PORTNAME= google-cloud-core -PORTVERSION= 2.2.2 +PORTVERSION= 2.2.3 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -10,8 +10,8 @@ COMMENT= API Client library for Google Cloud: Core Helpers LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.21.0<3.0:www/py-google-api-core@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}google-auth>=1.24.0<3.0.0:security/py-google-auth@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0:www/py-google-api-core@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}google-auth>=1.25.0<3.0.0:security/py-google-auth@${PY_FLAVOR} USES= python:3.6+ USE_PYTHON= autoplist concurrent distutils diff --git a/www/py-google-cloud-core/distinfo b/www/py-google-cloud-core/distinfo index f7a8b3f720d..cdc41ad6401 100644 --- a/www/py-google-cloud-core/distinfo +++ b/www/py-google-cloud-core/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133805 -SHA256 (google-cloud-core-2.2.2.tar.gz) = 7d19bf8868b410d0bdf5a03468a3f3f2db233c0ee86a023f4ecc2b7a4b15f736 -SIZE (google-cloud-core-2.2.2.tar.gz) = 35004 +TIMESTAMP = 1647264702 +SHA256 (google-cloud-core-2.2.3.tar.gz) = 89d2f7189bc6dc74de128d423ea52cc8719f0a5dbccd9ca80433f6504a20255c +SIZE (google-cloud-core-2.2.3.tar.gz) = 35042 diff --git 
a/www/py-google-cloud-datastore/Makefile b/www/py-google-cloud-datastore/Makefile index 858d6e5f15f..5d3cd820367 100644 --- a/www/py-google-cloud-datastore/Makefile +++ b/www/py-google-cloud-datastore/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-cloud-datastore -PORTVERSION= 2.4.0 +PORTVERSION= 2.5.1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,14 +12,18 @@ COMMENT= Google Cloud Datastore API client library LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.28.0<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}google-cloud-core>=1.4.0<3.0:www/py-google-cloud-core@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}libcst>=0.2.5:devel/py-libcst@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.4.0:devel/py-proto-plus@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.15.0:devel/py-proto-plus@${PY_FLAVOR} USES= python:3.6+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +OPTIONS_DEFINE= LIBCST +LIBCST_DESC= Fixup scripts + +LIBCST_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}libcst>=0.2.5:devel/py-libcst@${PY_FLAVOR} + .include diff --git a/www/py-google-cloud-datastore/distinfo b/www/py-google-cloud-datastore/distinfo index 9dcab7edbc7..d3fa42f0ec4 100644 --- a/www/py-google-cloud-datastore/distinfo +++ b/www/py-google-cloud-datastore/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637073030 -SHA256 (google-cloud-datastore-2.4.0.tar.gz) = 4a6f04112f2685a0a5cd8c7cb7946572bb7e0f6ca7cbe0088514006fca8594ca -SIZE (google-cloud-datastore-2.4.0.tar.gz) = 147306 +TIMESTAMP = 1647264704 +SHA256 (google-cloud-datastore-2.5.1.tar.gz) = d5fa56a2fb20eb4bb2006a6cf502974dda95e16a9938ec9e0c5c57f187e000c7 +SIZE (google-cloud-datastore-2.5.1.tar.gz) = 153340 diff --git a/www/py-google-cloud-dlp/Makefile b/www/py-google-cloud-dlp/Makefile index 
c67892287c9..a21073ad56a 100644 --- a/www/py-google-cloud-dlp/Makefile +++ b/www/py-google-cloud-dlp/Makefile @@ -1,5 +1,5 @@ PORTNAME= google-cloud-dlp -PORTVERSION= 3.6.0 +PORTVERSION= 3.6.2 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -11,7 +11,7 @@ LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}proto-plus>=0.4.0:devel/py-proto-plus@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.15.0:devel/py-proto-plus@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pytz>=2021.1:devel/py-pytz@${PY_FLAVOR} USES= python:3.6+ diff --git a/www/py-google-cloud-dlp/distinfo b/www/py-google-cloud-dlp/distinfo index fa7066d33bf..5f9874af0d3 100644 --- a/www/py-google-cloud-dlp/distinfo +++ b/www/py-google-cloud-dlp/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643382568 -SHA256 (google-cloud-dlp-3.6.0.tar.gz) = 20abce8d8d3939db243cbc0da62a73ff1a4e3b3b341f7ced0cfeb5e2c4a66621 -SIZE (google-cloud-dlp-3.6.0.tar.gz) = 120900 +TIMESTAMP = 1647264706 +SHA256 (google-cloud-dlp-3.6.2.tar.gz) = 30c4dea320b7d0c5bd35dad72c0a9e94879e21db1d362eeee73c1584b79e4f29 +SIZE (google-cloud-dlp-3.6.2.tar.gz) = 122840 diff --git a/www/py-google-cloud-speech/Makefile b/www/py-google-cloud-speech/Makefile index 7932fa66a64..6c4209ec671 100644 --- a/www/py-google-cloud-speech/Makefile +++ b/www/py-google-cloud-speech/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= google-cloud-speech -PORTVERSION= 2.12.0 +PORTVERSION= 2.13.1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,13 +12,17 @@ COMMENT= Google Cloud Speech API client library LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.28.0<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}libcst>=0.2.5:devel/py-libcst@${PY_FLAVOR} \ - 
${PYTHON_PKGNAMEPREFIX}proto-plus>=1.4.0:devel/py-proto-plus@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.15.0:devel/py-proto-plus@${PY_FLAVOR} USES= python:3.6+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes +OPTIONS_DEFINE= LIBCST +LIBCST_DESC= Fixup scripts + +LIBCST_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}libcst>=0.2.5:devel/py-libcst@${PY_FLAVOR} + .include diff --git a/www/py-google-cloud-speech/distinfo b/www/py-google-cloud-speech/distinfo index cf27d4e6554..611240b2b01 100644 --- a/www/py-google-cloud-speech/distinfo +++ b/www/py-google-cloud-speech/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133809 -SHA256 (google-cloud-speech-2.12.0.tar.gz) = acbf9948ce3870c72b45089356985de9df3cd881830d1127a10cb80ada9786c7 -SIZE (google-cloud-speech-2.12.0.tar.gz) = 106103 +TIMESTAMP = 1647264708 +SHA256 (google-cloud-speech-2.13.1.tar.gz) = 2718480b838cb178dd258343a261219873d40b3bde152d287835ac5ffc1de730 +SIZE (google-cloud-speech-2.13.1.tar.gz) = 116199 diff --git a/www/py-google-cloud-vision/Makefile b/www/py-google-cloud-vision/Makefile index aec93aa9bf6..17aa4715422 100644 --- a/www/py-google-cloud-vision/Makefile +++ b/www/py-google-cloud-vision/Makefile @@ -1,5 +1,5 @@ PORTNAME= google-cloud-vision -PORTVERSION= 2.6.3 +PORTVERSION= 2.7.1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -10,7 +10,7 @@ COMMENT= Cloud Vision API API client library LICENSE= APACHE20 LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.28.0<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}google-api-core>=1.31.5<3.0.0:www/py-google-api-core@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}proto-plus>=1.15.0:devel/py-proto-plus@${PY_FLAVOR} USES= python:3.6+ @@ -19,7 +19,7 @@ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes OPTIONS_DEFINE= LIBCST -LIBCST_DESC= Concrete 
Syntax Tree (CST) support +LIBCST_DESC= Fixup scripts LIBCST_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}libcst>=0.2.5:devel/py-libcst@${PY_FLAVOR} diff --git a/www/py-google-cloud-vision/distinfo b/www/py-google-cloud-vision/distinfo index 443acab2bd7..091942a41c5 100644 --- a/www/py-google-cloud-vision/distinfo +++ b/www/py-google-cloud-vision/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641046030 -SHA256 (google-cloud-vision-2.6.3.tar.gz) = 54b7f63c746ab95a504bd6b9b1d806192483976a3452a1a59a7faa0eaaa03491 -SIZE (google-cloud-vision-2.6.3.tar.gz) = 640629 +TIMESTAMP = 1647264710 +SHA256 (google-cloud-vision-2.7.1.tar.gz) = 4f2f340bbdcadff4de6ec7521e29e6a76cbc4ca80e7eda93e4f20369fab51a03 +SIZE (google-cloud-vision-2.7.1.tar.gz) = 658706 diff --git a/www/py-google-resumable-media/Makefile b/www/py-google-resumable-media/Makefile index 0818a890f03..7b9ccb94f2c 100644 --- a/www/py-google-resumable-media/Makefile +++ b/www/py-google-resumable-media/Makefile @@ -1,5 +1,5 @@ PORTNAME= google-resumable-media -PORTVERSION= 2.3.0 +PORTVERSION= 2.3.2 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-google-resumable-media/distinfo b/www/py-google-resumable-media/distinfo index 6dca2776395..bedf5fd018c 100644 --- a/www/py-google-resumable-media/distinfo +++ b/www/py-google-resumable-media/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058160 -SHA256 (google-resumable-media-2.3.0.tar.gz) = 1a7dce5790b04518edc02c2ce33965556660d64957106d66a945086e2b642572 -SIZE (google-resumable-media-2.3.0.tar.gz) = 2152763 +TIMESTAMP = 1647264712 +SHA256 (google-resumable-media-2.3.2.tar.gz) = 06924e8b1e79f158f0202e7dd151ad75b0ea9d59b997c850f56bdd4a5a361513 +SIZE (google-resumable-media-2.3.2.tar.gz) = 2152842 diff --git a/www/py-habanero/Makefile b/www/py-habanero/Makefile index 3fd6c219a7d..159e087ce39 100644 --- a/www/py-habanero/Makefile +++ b/www/py-habanero/Makefile @@ -1,9 +1,13 @@ PORTNAME= habanero DISTVERSION= 1.0.0 +PORTREVISION= 1 CATEGORIES= 
www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +PATCH_SITES= https://github.com/sckott/habanero/commit/ +PATCHFILES= e50874fbea1944bd6e4f7bb97743200bdd7c7e9e.patch:-p1 + MAINTAINER= yuri@FreeBSD.org COMMENT= Low-level client for Crossref search API diff --git a/www/py-habanero/distinfo b/www/py-habanero/distinfo index 2de6c4c3848..ce3811859ac 100644 --- a/www/py-habanero/distinfo +++ b/www/py-habanero/distinfo @@ -1,3 +1,5 @@ -TIMESTAMP = 1636923130 +TIMESTAMP = 1648227981 SHA256 (habanero-1.0.0.tar.gz) = 2763debc455841e28715ff20c9884249e1b2eecfd22c20967928e1eb657a0fe5 SIZE (habanero-1.0.0.tar.gz) = 45676 +SHA256 (e50874fbea1944bd6e4f7bb97743200bdd7c7e9e.patch) = 3fa980b4ab0cba86386248ac030fa937de5da572e9df94195bbbfaa591a1ccb1 +SIZE (e50874fbea1944bd6e4f7bb97743200bdd7c7e9e.patch) = 740 diff --git a/www/py-html5-parser/Makefile b/www/py-html5-parser/Makefile index fb1e1e1de43..2e6c59490d9 100644 --- a/www/py-html5-parser/Makefile +++ b/www/py-html5-parser/Makefile @@ -1,5 +1,6 @@ PORTNAME= html5-parser PORTVERSION= 0.4.10 +PORTREVISION= 1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-instabot/Makefile b/www/py-instabot/Makefile index 334cf534b06..4baeb54fa1f 100644 --- a/www/py-instabot/Makefile +++ b/www/py-instabot/Makefile @@ -2,7 +2,7 @@ PORTNAME= instabot PORTVERSION= 0.117.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-instabot/files/patch-setup.py b/www/py-instabot/files/patch-setup.py new file mode 100644 index 00000000000..e9d22682bd7 --- /dev/null +++ b/www/py-instabot/files/patch-setup.py @@ -0,0 +1,9 @@ +--- setup.py.orig 2020-03-28 11:06:35 UTC ++++ setup.py +@@ -55,5 +55,5 @@ setup( + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + ], +- packages=find_packages(), ++ packages=find_packages(exclude=['tests*']), + ) diff --git 
a/www/py-jonpy/files/patch-2to3 b/www/py-jonpy/files/patch-2to3 new file mode 100644 index 00000000000..4f80a0eb173 --- /dev/null +++ b/www/py-jonpy/files/patch-2to3 @@ -0,0 +1,398 @@ +--- jon/cgi.py.orig 2012-04-04 15:01:19 UTC ++++ jon/cgi.py +@@ -1,10 +1,10 @@ + # $Id: cgi.py,v 9f23fa584882 2012/04/04 15:01:19 jon $ + +-import sys, re, os, Cookie, errno ++import sys, re, os, http.cookies, errno + try: +- import cStringIO as StringIO ++ import io as StringIO + except ImportError: +- import StringIO ++ import io + + """Object-oriented CGI interface.""" + +@@ -38,13 +38,13 @@ def html_encode(raw): + Specifically, the following characters are encoded as entities: + & < > " ' + + """ +- if not isinstance(raw, (str, unicode)): ++ if not isinstance(raw, str): + raw = str(raw) + return re.sub(_html_encre, lambda m: _html_encodes[m.group(0)], raw) + + def url_encode(raw): + """Return the string parameter URL-encoded.""" +- if not isinstance(raw, (str, unicode)): ++ if not isinstance(raw, str): + raw = str(raw) + return re.sub(_url_encre, lambda m: "%%%02X" % ord(m.group(0)), raw) + +@@ -69,7 +69,7 @@ def traceback(req, html=0): + except SequencingError: + pass + cgitb.Hook(file=req)(*exc) +- s = StringIO.StringIO() ++ s = io.StringIO() + cgitb.Hook(file=s, format="text")(*exc) + req.error(s.getvalue()) + +@@ -90,7 +90,7 @@ class Request(object): + self._doneHeaders = 0 + self._headers = [] + self._bufferOutput = 1 +- self._output = StringIO.StringIO() ++ self._output = io.StringIO() + self._pos = 0 + self.closed = 0 + self._encoding = self._inputencoding = self._form_encoding = None +@@ -98,7 +98,7 @@ class Request(object): + del self.params + except AttributeError: + pass +- self.cookies = Cookie.SimpleCookie() ++ self.cookies = http.cookies.SimpleCookie() + if "HTTP_COOKIE" in self.environ: + self.cookies.load(self.environ["HTTP_COOKIE"]) + self.aborted = 0 +@@ -263,11 +263,11 @@ class Request(object): + """Sends some data to the client.""" + self._check_open() + if 
self._encoding: +- if not isinstance(s, unicode): ++ if not isinstance(s, str): + if self._inputencoding: +- s = unicode(s, self._inputencoding) ++ s = str(s, self._inputencoding) + else: +- s = unicode(s) ++ s = str(s) + s = s.encode(self._encoding) + else: + s = str(s) +@@ -347,7 +347,7 @@ class Request(object): + placed in the dictionary will be a list. This is useful for multiple-value + fields. If the variable name ends with a '!' character (before the '*' if + present) then the value will be a mime.Entity object.""" +- import mime ++ from . import mime + headers = "Content-Type: %s\n" % contenttype + for entity in mime.Entity(encoded.read(), mime=1, headers=headers).entities: + if not entity.content_disposition: +@@ -504,7 +504,7 @@ class CGIRequest(Request): + if not self.aborted: + try: + self.__out.write(s) +- except IOError, x: ++ except IOError as x: + # Ignore EPIPE, caused by the browser having gone away + if x[0] != errno.EPIPE: + raise +@@ -514,7 +514,7 @@ class CGIRequest(Request): + if not self.aborted: + try: + self.__out.flush() +- except IOError, x: ++ except IOError as x: + # Ignore EPIPE, caused by the browser having gone away + if x[0] != errno.EPIPE: + raise +--- jon/fcgi.py.orig 2010-03-20 19:53:55 UTC ++++ jon/fcgi.py +@@ -1,7 +1,7 @@ + # $Id: fcgi.py,v 5efd11fe0588 2010/03/20 18:27:40 jribbens $ + + import struct, socket, sys, errno, os, select +-import cgi, fakefile ++from . 
import cgi, fakefile + + log_level = 0 + log_name = "/tmp/fcgi.log" +@@ -184,8 +184,8 @@ class Connection(object): + self.params = params + self.threading_level = threading_level + if self.threading_level > 1: +- import thread +- self.socketlock = thread.allocate_lock() ++ import _thread ++ self.socketlock = _thread.allocate_lock() + else: + self.socketlock = None + +@@ -216,9 +216,9 @@ class Connection(object): + self.socketlock.release() + else: + self.socket.sendall(rec.encode()) +- except socket.error, x: ++ except socket.error as x: + if x[0] == errno.EPIPE: +- for req in self.requests.values(): ++ for req in list(self.requests.values()): + req.aborted = 2 + else: + raise +@@ -242,7 +242,7 @@ class Connection(object): + if isinstance(x, (EOFError, ValueError)) or \ + (isinstance(x, socket.error) and x[0] == errno.EBADF): + self.log(2, 0, "EOF received on connection") +- for req in self.requests.values(): ++ for req in list(self.requests.values()): + req.aborted = 2 + break + else: +@@ -314,8 +314,8 @@ class Connection(object): + self.log(3, rec.request_id, "< FCGI_PARAMS: ") + if self.threading_level > 1: + self.log(2, rec.request_id, "starting request thread") +- import thread +- thread.start_new_thread(req.run, ()) ++ import _thread ++ _thread.start_new_thread(req.run, ()) + else: + self.log(2, rec.request_id, "executing request") + req.run() +@@ -417,7 +417,7 @@ class Request(cgi.Request): + 0, 0, 0) + self.log(2, "> FCGI_END_REQUEST") + self.__connection.write(rec) +- except IOError, x: ++ except IOError as x: + if x[0] == errno.EPIPE: + self.log(2, "EPIPE during request finalisation") + else: +@@ -457,7 +457,7 @@ class Request(cgi.Request): + pos += len(rec.content_data) + try: + self.__connection.write(rec) +- except IOError, x: ++ except IOError as x: + if x[0] == errno.EPIPE: + self.aborted = 2 + self.log(2, "Aborted due to EPIPE") +@@ -480,9 +480,9 @@ class Server(object): + self.log(2, "theading_level = %d" % threading_level) + if threading_level > 
0: + try: +- import thread +- log_lock = thread.allocate_lock() +- except ImportError, x: ++ import _thread ++ log_lock = _thread.allocate_lock() ++ except ImportError as x: + threading_level = 0 + self.log(2, "cannot import thread (%s), disabling threading" % str(x)) + self.threading_level = threading_level +@@ -505,7 +505,7 @@ class Server(object): + socket.SOCK_STREAM) + try: + self._sock.getpeername() +- except socket.error, x: ++ except socket.error as x: + if x[0] != errno.ENOTSOCK and x[0] != errno.ENOTCONN: + raise + if x[0] == errno.ENOTSOCK: +@@ -522,7 +522,7 @@ class Server(object): + # OSes) + select.select([self._sock], [], []) + (newsock, addr) = self._sock.accept() +- except socket.error, x: ++ except socket.error as x: + if x[0] == errno.EBADF: + break + raise +@@ -537,8 +537,8 @@ class Server(object): + self.params, self.threading_level) + del newsock + if self.threading_level > 0: +- import thread +- thread.start_new_thread(conn.run, ()) ++ import _thread ++ _thread.start_new_thread(conn.run, ()) + else: + conn.run() + if self.max_requests > 0: +--- jon/session.py.orig 2011-05-27 18:04:15 UTC ++++ jon/session.py +@@ -1,6 +1,6 @@ + # $Id: session.py,v 2bbf755d62ad 2011/05/27 18:03:28 jon $ + +-import time, hmac, Cookie, re, random, os, errno, fcntl ++import time, hmac, http.cookies, re, random, os, errno, fcntl + try: + import hashlib + sha = hashlib.sha1 +@@ -9,7 +9,7 @@ except ImportError: + import sha + shanew = sha.new + try: +- import cPickle as pickle ++ import pickle as pickle + except ImportError: + import pickle + +@@ -127,7 +127,7 @@ class Session(dict): + if "hash" not in self: + self["hash"] = self._make_hash(self["id"], self._secret) + if self.cookie: +- c = Cookie.SimpleCookie() ++ c = http.cookies.SimpleCookie() + c[self.cookie] = self["id"] + self["hash"] + c[self.cookie]["path"] = self.root + "/" + if self.secure: +@@ -191,13 +191,13 @@ class FileSession(Session): + Session._create(self, secret) + try: + os.lstat("%s/%s" % 
(self.basedir, self["id"][:2])) +- except OSError, x: ++ except OSError as x: + if x[0] == errno.ENOENT: +- os.mkdir("%s/%s" % (self.basedir, self["id"][:2]), 0700) ++ os.mkdir("%s/%s" % (self.basedir, self["id"][:2]), 0o700) + try: + fd = os.open("%s/%s/%s" % (self.basedir, self["id"][:2], +- self["id"][2:]), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0700) +- except OSError, x: ++ self["id"][2:]), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o700) ++ except OSError as x: + if x[0] != errno.EEXIST: + raise + continue +@@ -211,7 +211,7 @@ class FileSession(Session): + try: + f = open("%s/%s/%s" % (self.basedir, self["id"][:2], self["id"][2:]), + "r+b") +- except IOError, x: ++ except IOError as x: + if x[0] != errno.ENOENT: + raise + return 0 +@@ -255,9 +255,9 @@ class FileSession(Session): + st = os.lstat(basedir) + if st[4] != os.getuid(): + raise Error("Sessions basedir is not owned by user %d" % os.getuid()) +- except OSError, x: ++ except OSError as x: + if x[0] == errno.ENOENT: +- os.mkdir(basedir, 0700) ++ os.mkdir(basedir, 0o700) + return basedir + _find_basedir = staticmethod(_find_basedir) + +@@ -322,19 +322,19 @@ class MySQLSession(GenericSQLSession): + while 1: + Session._create(self, secret) + self.dbc.execute("SELECT 1 FROM %s WHERE ID=%%s" % (self.table,), +- (long(self["id"], 16),)) ++ (int(self["id"], 16),)) + if self.dbc.rowcount == 0: + break + self["hash"] = self._make_hash(self["id"], secret) + self.dbc.execute("INSERT INTO %s (ID,hash,created,updated,data) VALUES " \ + "(%%s,%%s,%%s,%%s,%%s)" % (self.table,), +- (long(self["id"], 16), self["hash"], int(self.created), ++ (int(self["id"], 16), self["hash"], int(self.created), + int(self.created), pickle.dumps({}, 1))) + self.dbc.execute("UNLOCK TABLES") + + def _load(self): + self.dbc.execute("SELECT created,data FROM %s WHERE ID=%%s" % (self.table,), +- (long(self["id"], 16),)) ++ (int(self["id"], 16),)) + if self.dbc.rowcount == 0: + return 0 + row = self.dbc.fetchone() +@@ -345,7 +345,7 @@ class 
MySQLSession(GenericSQLSession): + def save(self): + self.dbc.execute("UPDATE %s SET updated=%%s,data=%%s" + " WHERE ID=%%s" % (self.table,), (int(time.time()), +- pickle.dumps(self.copy(), 1), long(self["id"], 16))) ++ pickle.dumps(self.copy(), 1), int(self["id"], 16))) + + + SQLSession = MySQLSession # backwards compatibility name +--- jon/wsgi.py.orig 2010-03-23 01:01:42 UTC ++++ jon/wsgi.py +@@ -1,6 +1,6 @@ + # $Id: wsgi.py,v a9ebd961ae72 2010/03/23 01:01:42 jon $ + +-import cgi, fakefile ++from . import cgi, fakefile + + + # classes involving calling jonpy-style handlers from WSGI server connectors +@@ -64,7 +64,7 @@ class Application(object): + + class Handler(cgi.Handler): + def process(self, req): +- environ = dict(req.environ.items()) ++ environ = dict(list(req.environ.items())) + environ["wsgi.version"] = (1, 0) + environ["wsgi.input"] = req + environ["wsgi.errors"] = fakefile.FakeOutput(req.error) +@@ -81,7 +81,7 @@ class Handler(cgi.Handler): + if exc_info: + try: + if req.get_header("Status") is not None: +- raise exc_info[0], exc_info[1], exc_info[2] ++ raise exc_info[0](exc_info[1]).with_traceback(exc_info[2]) + finally: + exc_info = None + elif req.get_header("Status") is not None: +--- jon/wt/__init__.py.orig 2010-03-20 19:53:55 UTC ++++ jon/wt/__init__.py +@@ -43,7 +43,7 @@ def replace(wt, template, namespace): + out = getattr(namespace, chunk)() + else: + out = getattr(namespace, chunk) +- if not isinstance(out, unicode): ++ if not isinstance(out, str): + out = str(out) + wt.req.write(encode(out)) + +@@ -79,7 +79,7 @@ class GlobalTemplate(TemplateCode): + if encoding is None: + self.process(open(self.template_name(), "rb").read()) + else: +- self.process(unicode(open(self.template_name(), "rb").read(), ++ self.process(str(open(self.template_name(), "rb").read(), + encoding)) + + def template_name(self): +@@ -94,7 +94,7 @@ class GlobalTemplate(TemplateCode): + # is substituted instead + obj = self.outer.page(self.outer) + if obj.template_as_file: 
+- import StringIO as cStringIO ++ import io as cStringIO + obj.main(StringIO.StringIO(self.outer._pageTemplate)) + else: + obj.main(self.outer._pageTemplate) +@@ -123,7 +123,7 @@ def process(wt, template, namespace, selected=None): + (type(selected) == type(()) and name in selected)): + obj = getattr(namespace, name)(namespace, wt) + if obj.template_as_file: +- import cStringIO as StringIO ++ import io as StringIO + obj.main(StringIO.StringIO(template[start.end():end])) + else: + obj.main(template[start.end():end]) +@@ -177,7 +177,7 @@ class Handler(cgi.Handler): + except KeyError: + namespace = { "wt": sys.modules[__name__] } + code = compile(open(codefname, "r").read(), codefname, "exec") +- exec code in namespace ++ exec(code, namespace) + del code + if self.cache_code: + _code_cache[codefname] = namespace +@@ -190,7 +190,7 @@ class Handler(cgi.Handler): + if encoding is None: + obj.main(open(self.template, "rb").read()) + else: +- obj.main(unicode(open(self.template, "rb").read(), encoding)) ++ obj.main(str(open(self.template, "rb").read(), encoding)) + self.post_request(obj) + + diff --git a/www/py-nevow/files/patch-2to3 b/www/py-nevow/files/patch-2to3 new file mode 100644 index 00000000000..f669cb5b5ea --- /dev/null +++ b/www/py-nevow/files/patch-2to3 @@ -0,0 +1,4233 @@ +--- formless/annotate.py.orig 2015-10-20 22:44:09 UTC ++++ formless/annotate.py +@@ -22,11 +22,11 @@ from formless import iformless + class count(object): + def __init__(self): + self.id = 0 +- def next(self): ++ def __next__(self): + self.id += 1 + return self.id + +-nextId = count().next ++nextId = count().__next__ + + + class InputError(Exception): +@@ -102,7 +102,7 @@ class Typed(Attribute): + required = False + requiredFailMessage = 'Please enter a value' + null = None +- unicode = False ++ str = False + + __name__ = '' + +@@ -114,7 +114,7 @@ class Typed(Attribute): + required=None, + requiredFailMessage=None, + null=None, +- unicode=None, ++ str=None, + **attributes): + + self.id = 
nextId() +@@ -130,15 +130,15 @@ class Typed(Attribute): + self.requiredFailMessage = requiredFailMessage + if null is not None: + self.null = null +- if unicode is not None: +- self.unicode = unicode ++ if str is not None: ++ self.str = str + self.attributes = attributes + + def getAttribute(self, name, default=None): + return self.attributes.get(name, default) + + def coerce(self, val, configurable): +- raise NotImplementedError, "Implement in %s" % util.qual(self.__class__) ++ raise NotImplementedError("Implement in %s" % util.qual(self.__class__)) + + + ####################################### +@@ -209,7 +209,7 @@ class Integer(Typed): + except ValueError: + if sys.version_info < (2,3): # Long/Int aren't integrated + try: +- return long(val) ++ return int(val) + except ValueError: + raise InputError("'%s' is not an integer." % val) + +@@ -540,7 +540,7 @@ class Binding(object): + return self.original.__class__.__name__.lower() + + def configure(self, boundTo, results): +- raise NotImplementedError, "Implement in %s" % util.qual(self.__class__) ++ raise NotImplementedError("Implement in %s" % util.qual(self.__class__)) + + def coerce(self, val, configurable): + if hasattr(self.original, 'coerce'): +@@ -617,7 +617,7 @@ class GroupBinding(Binding): + self.complexType = typedValue.complexType + + def configure(self, boundTo, group): +- print "CONFIGURING GROUP BINDING", boundTo, group ++ print("CONFIGURING GROUP BINDING", boundTo, group) + + + def _sorter(x, y): +@@ -670,7 +670,7 @@ def nameToLabel(mname): + def labelAndDescriptionFromDocstring(docstring): + if docstring is None: + docstring = '' +- docs = filter(lambda x: x, [x.strip() for x in docstring.split('\n')]) ++ docs = [x for x in [x.strip() for x in docstring.split('\n')] if x] + if len(docs) > 1: + return docs[0], '\n'.join(docs[1:]) + else: +@@ -723,7 +723,7 @@ class MetaTypedInterface(InterfaceClass): + cls.complexType = True + possibleActions = [] + actionAttachers = [] +- for key, value in dct.items(): 
++ for key, value in list(dct.items()): + if key[0] == '_': continue + + if isinstance(value, MetaTypedInterface): +--- formless/configurable.py.orig 2015-10-20 22:44:09 UTC ++++ formless/configurable.py +@@ -66,7 +66,7 @@ class Configurable(object): + try: + binding = self.bindingDict[name] + except KeyError: +- raise RuntimeError, "%s is not an exposed binding on object %s." % (name, self.boundTo) ++ raise RuntimeError("%s is not an exposed binding on object %s." % (name, self.boundTo)) + binding.boundTo = self.boundTo + return binding + +@@ -125,7 +125,7 @@ class Configurable(object): + + class NotFoundConfigurable(Configurable): + def getBinding(self, context, name): +- raise RuntimeError, self.original ++ raise RuntimeError(self.original) + + + class TypedInterfaceConfigurable(Configurable): +--- formless/processors.py.orig 2015-10-20 22:44:09 UTC ++++ formless/processors.py +@@ -21,7 +21,7 @@ faketag = tags.html() + def exceptblock(f, handler, exception, *a, **kw): + try: + result = f(*a, **kw) +- except exception, e: ++ except exception as e: + return handler(e) + if isinstance(result, Deferred): + def _(fail): +@@ -91,7 +91,7 @@ class ProcessMethodBinding(components.Adapter): + typedValue = self.original.typedValue + results = {} + failures = {} +- if data.has_key('----'): ++ if '----' in data: + ## ---- is the "direct object", the one argument you can specify using the command line without saying what the argument name is + data[typedValue.arguments[0].name] = data['----'] + del data['----'] +@@ -101,7 +101,7 @@ class ProcessMethodBinding(components.Adapter): + context = WovenContext(context, faketag) + context.remember(binding, iformless.IBinding) + results[name] = iformless.IInputProcessor(binding.typedValue).process(context, boundTo, data.get(name, [''])) +- except formless.InputError, e: ++ except formless.InputError as e: + results[name] = data.get(name, [''])[0] + failures[name] = e.reason + +@@ -130,14 +130,14 @@ class 
ProcessPropertyBinding(components.Adapter): + result = {} + try: + result[binding.name] = iformless.IInputProcessor(binding.typedValue).process(context, boundTo, data.get(binding.name, [''])) +- except formless.InputError, e: ++ except formless.InputError as e: + result[binding.name] = data.get(binding.name, ['']) + raise formless.ValidateError({binding.name: e.reason}, e.reason, result) + + if autoConfigure: + try: + return self.original.configure(boundTo, result) +- except formless.InputError, e: ++ except formless.InputError as e: + result[binding.name] = data.get(binding.name, ['']) + raise formless.ValidateError({binding.name: e.reason}, e.reason, result) + return result +@@ -150,7 +150,7 @@ class ProcessTyped(components.Adapter): + """ + typed = self.original + val = data[0] +- if typed.unicode: ++ if typed.str: + try: + val = val.decode(getPOSTCharset(context), 'replace') + except LookupError: +@@ -164,7 +164,7 @@ class ProcessTyped(components.Adapter): + return typed.null + try: + return typed.coerce(val, boundTo) +- except TypeError, e: ++ except TypeError as e: + warnings.warn('Typed.coerce takes two values now, the value to coerce and the configurable in whose context the coerce is taking place. %s %s' % (typed.__class__, typed)) + return typed.coerce(val) + +@@ -190,7 +190,7 @@ class ProcessPassword(components.Adapter): + else: + return typed.null + val = data[0] +- if typed.unicode: ++ if typed.str: + try: + val = val.decode(getPOSTCharset(context), 'replace') + except LookupError: +--- formless/webform.py.orig 2015-10-20 22:44:09 UTC ++++ formless/webform.py +@@ -4,8 +4,8 @@ + # See LICENSE for details. 
+ + +-from __future__ import generators + ++ + import warnings + from zope.interface import implements, Interface + +@@ -66,7 +66,7 @@ class BaseInputRenderer(components.Adapter): + return context.tag + + def input(self, context, slot, data, name, value): +- raise NotImplementedError, "Implement in subclass" ++ raise NotImplementedError("Implement in subclass") + + class PasswordRenderer(BaseInputRenderer): + def input(self, context, slot, data, name, value): +@@ -437,7 +437,7 @@ def renderForms(configurableKey='', bindingNames=None, + if bindingDefaults is None: + available = configurable.getBindingNames(context) + else: +- available = bindingDefaults.iterkeys() ++ available = iter(bindingDefaults.keys()) + + def _callback(binding): + renderer = iformless.IBindingRenderer(binding, defaultBindingRenderer) +--- nevow/_flat.py.orig 2016-05-08 19:28:50 UTC ++++ nevow/_flat.py +@@ -61,7 +61,7 @@ class FlattenerError(Exception): + @return: A string representation of C{obj}. + @rtype: L{str} + """ +- if isinstance(obj, (str, unicode)): ++ if isinstance(obj, str): + # It's somewhat unlikely that there will ever be a str in the roots + # list. However, something like a MemoryError during a str.replace + # call (eg, replacing " with ") could possibly cause this. +@@ -175,7 +175,7 @@ def _ctxForRequest(request, slotData, renderFactory, i + ctx.remember(request, IRequest) + for slotGroup in slotData: + if slotGroup is not None: +- for k, v in slotGroup.items(): ++ for k, v in list(slotGroup.items()): + ctx.fillSlots(k, v) + if renderFactory is not None: + ctx.remember(_OldRendererFactory(renderFactory), IRendererFactory) +@@ -224,7 +224,7 @@ def _flatten(request, write, root, slotData, renderFac + @return: An iterator which yields C{str}, L{Deferred}, and more iterators + of the same type. 
+ """ +- if isinstance(root, unicode): ++ if isinstance(root, str): + root = root.encode('utf-8') + elif isinstance(root, WovenContext): + # WovenContext is supported via the getFlattener case, but that is a +@@ -268,13 +268,13 @@ def _flatten(request, write, root, slotData, renderFac + False, True) + else: + write('<') +- if isinstance(root.tagName, unicode): ++ if isinstance(root.tagName, str): + tagName = root.tagName.encode('ascii') + else: + tagName = str(root.tagName) + write(tagName) +- for k, v in sorted(root.attributes.iteritems()): +- if isinstance(k, unicode): ++ for k, v in sorted(root.attributes.items()): ++ if isinstance(k, str): + k = k.encode('ascii') + write(" " + k + "=\"") + yield _flatten(request, write, v, slotData, +@@ -310,7 +310,7 @@ def _flatten(request, write, root, slotData, renderFac + write(root.num) + write(';') + elif isinstance(root, xml): +- if isinstance(root.content, unicode): ++ if isinstance(root.content, str): + write(root.content.encode('utf-8')) + else: + write(root.content) +@@ -409,10 +409,10 @@ def flatten(request, write, root, inAttribute, inXML): + # In Python 2.5, after an exception, a generator's gi_frame is + # None. 
+ frame = stack[-1].gi_frame +- element = stack[-1].next() ++ element = next(stack[-1]) + except StopIteration: + stack.pop() +- except Exception, e: ++ except Exception as e: + stack.pop() + roots = [] + for generator in stack: +@@ -423,7 +423,8 @@ def flatten(request, write, root, inAttribute, inXML): + if type(element) is str: + write(element) + elif isinstance(element, Deferred): +- def cbx((original, toFlatten)): ++ def cbx(xxx_todo_changeme): ++ (original, toFlatten) = xxx_todo_changeme + stack.append(toFlatten) + return original + yield element.addCallback(cbx) +@@ -456,7 +457,7 @@ def _flattensome(state, write, schedule, result): + """ + while True: + try: +- element = state.next() ++ element = next(state) + except StopIteration: + result.callback(None) + except: +--- nevow/accessors.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/accessors.py +@@ -48,7 +48,7 @@ class DirectiveAccessor(tpc.Adapter): + data = context.locate(IData) + container = IContainer(data, None) + if container is None: +- raise NoAccessor, "%r does not implement IContainer, and there is no registered adapter." % data ++ raise NoAccessor("%r does not implement IContainer, and there is no registered adapter." % data) + child = container.child(context, self.original.name) + return child + +--- nevow/athena.py.orig 2016-05-08 19:28:50 UTC ++++ nevow/athena.py +@@ -1,6 +1,6 @@ + # -*- test-case-name: nevow.test.test_athena -*- + +-import itertools, os, re, warnings, StringIO ++import itertools, os, re, warnings, io + + from zope.interface import implements + +@@ -49,7 +49,7 @@ class LivePageError(Exception): + """ + Base exception for LivePage errors. + """ +- jsClass = u'Divmod.Error' ++ jsClass = 'Divmod.Error' + + + +@@ -58,7 +58,7 @@ class NoSuchMethod(LivePageError): + Raised when an attempt is made to invoke a method which is not defined or + exposed. 
+ """ +- jsClass = u'Nevow.Athena.NoSuchMethod' ++ jsClass = 'Nevow.Athena.NoSuchMethod' + + def __init__(self, objectID, methodName): + self.objectID = objectID +@@ -186,7 +186,7 @@ class AthenaModule(object): + Calculate our dependencies given the path to our source. + """ + depgen = self._extractImports(file(jsFile, 'rU')) +- return self.packageDeps + dict.fromkeys(depgen).keys() ++ return self.packageDeps + list(dict.fromkeys(depgen).keys()) + + + def dependencies(self): +@@ -300,7 +300,7 @@ def _collectPackageBelow(baseDir, extension): + path = os.path.join(root, dir, '__init__.' + extension) + if not os.path.exists(path): + path = EMPTY +- mapping[unicode(name, 'ascii')] = path ++ mapping[str(name, 'ascii')] = path + _revMap[os.path.join(root, dir)] = name + '.' + + for fn in filenames: +@@ -315,7 +315,7 @@ def _collectPackageBelow(baseDir, extension): + + name = stem + fn[:-(len(extension) + 1)] + path = os.path.join(root, fn) +- mapping[unicode(name, 'ascii')] = path ++ mapping[str(name, 'ascii')] = path + return mapping + + +@@ -540,11 +540,11 @@ def getJSFailure(exc, modules): + """ + Convert a serialized client-side exception to a Failure. 
+ """ +- text = '%s: %s' % (exc[u'name'], exc[u'message']) ++ text = '%s: %s' % (exc['name'], exc['message']) + + frames = [] +- if u'stack' in exc: +- frames = parseStack(exc[u'stack']) ++ if 'stack' in exc: ++ frames = parseStack(exc['stack']) + + return failure.Failure(JSException(text), exc_tb=buildTraceback(frames, modules)) + +@@ -618,8 +618,8 @@ class ConnectionLost(Exception): + pass + + +-CLOSE = u'close' +-UNLOAD = u'unload' ++CLOSE = 'close' ++UNLOAD = 'unload' + + class ReliableMessageDelivery(object): + """ +@@ -775,8 +775,8 @@ class ReliableMessageDelivery(object): + + + def _unregisterDeferredAsOutputChannel(self, deferred): +- for i in xrange(len(self.outputs)): +- if self.outputs[i][0].im_self is deferred: ++ for i in range(len(self.outputs)): ++ if self.outputs[i][0].__self__ is deferred: + output, timeout = self.outputs.pop(i) + timeout.cancel() + break +@@ -1007,7 +1007,7 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + @ivar _localObjectIDCounter: A callable that will return a new + locally-unique object ID each time it is called. 
+ """ +- jsClass = u'Nevow.Athena.PageWidget' ++ jsClass = 'Nevow.Athena.PageWidget' + cssModule = None + + factory = LivePageFactory() +@@ -1140,7 +1140,7 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + if self.cssModuleRoot is None: + self.cssModuleRoot = location.child(self.clientID).child('cssmodule') + +- self._requestIDCounter = itertools.count().next ++ self._requestIDCounter = itertools.count().__next__ + + self._messageDeliverer = ReliableMessageDelivery( + self, +@@ -1151,7 +1151,7 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + connectionMade=self._connectionMade) + self._remoteCalls = {} + self._localObjects = {} +- self._localObjectIDCounter = itertools.count().next ++ self._localObjectIDCounter = itertools.count().__next__ + + self.addLocalObject(self) + +@@ -1252,7 +1252,7 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + """ + Invoke connectionMade on all attached widgets. + """ +- for widget in self._localObjects.values(): ++ for widget in list(self._localObjects.values()): + widget.connectionMade() + self._didConnect = True + +@@ -1274,10 +1274,10 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + d.errback(reason) + calls = self._remoteCalls + self._remoteCalls = {} +- for (reqID, resD) in calls.iteritems(): ++ for (reqID, resD) in calls.items(): + resD.errback(reason) + if self._didConnect: +- for widget in self._localObjects.values(): ++ for widget in list(self._localObjects.values()): + widget.connectionLost(reason) + self.factory.removeClient(self.clientID) + +@@ -1316,8 +1316,8 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + + + def callRemote(self, methodName, *args): +- requestID = u's2c%i' % (self._requestIDCounter(),) +- message = (u'call', (unicode(methodName, 'ascii'), requestID, args)) ++ requestID = 's2c%i' % (self._requestIDCounter(),) ++ message = ('call', (str(methodName, 'ascii'), requestID, args)) + resultD = defer.Deferred() + self._remoteCalls[requestID] = resultD + 
self.addMessage(message) +@@ -1439,12 +1439,13 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + raise AttributeError(methodName) + + +- def liveTransportMessageReceived(self, ctx, (action, args)): ++ def liveTransportMessageReceived(self, ctx, xxx_todo_changeme): + """ + A message was received from the reliable transport layer. Process it by + dispatching it first to myself, then later to application code if + applicable. + """ ++ (action, args) = xxx_todo_changeme + method = getattr(self, 'action_' + action) + method(ctx, *args) + +@@ -1472,11 +1473,11 @@ class LivePage(rend.Page, _HasJSClass, _HasCSSModule): + result.value.args) + else: + result = ( +- u'Divmod.Error', +- [u'%s: %s' % ( ++ 'Divmod.Error', ++ ['%s: %s' % ( + result.type.__name__.decode('ascii'), + result.getErrorMessage().decode('ascii'))]) +- message = (u'respond', (unicode(requestId), success, result)) ++ message = ('respond', (str(requestId), success, result)) + self.addMessage(message) + result.addBoth(_cbCall) + +@@ -1517,7 +1518,7 @@ def _rewriteEventHandlerToAttribute(tag): + """ + if isinstance(tag, stan.Tag): + extraAttributes = {} +- for i in xrange(len(tag.children) - 1, -1, -1): ++ for i in range(len(tag.children) - 1, -1, -1): + if isinstance(tag.children[i], stan.Tag) and tag.children[i].tagName == 'athena:handler': + info = tag.children.pop(i) + name = info.attributes['event'].encode('ascii') +@@ -1565,7 +1566,7 @@ def _rewriteAthenaId(tag): + if headers is not None: + ids = headers.split() + headers = [_mangleId(headerId) for headerId in ids] +- for n in xrange(len(headers) - 1, 0, -1): ++ for n in range(len(headers) - 1, 0, -1): + headers.insert(n, ' ') + tag.attributes['headers'] = headers + return tag +@@ -1580,7 +1581,7 @@ def rewriteAthenaIds(root): + + + class _LiveMixin(_HasJSClass, _HasCSSModule): +- jsClass = u'Nevow.Athena.Widget' ++ jsClass = 'Nevow.Athena.Widget' + cssModule = None + + preprocessors = [rewriteEventHandlerNodes, rewriteAthenaIds] +@@ -1633,7 
+1634,7 @@ class _LiveMixin(_HasJSClass, _HasCSSModule): + C{self.page}, add this object to the page and fill the I{athena:id} + slot with this object's Athena identifier. + """ +- assert isinstance(self.jsClass, unicode), "jsClass must be a unicode string" ++ assert isinstance(self.jsClass, str), "jsClass must be a unicode string" + + if self.page is None: + raise OrphanedFragment(self) +@@ -1681,7 +1682,7 @@ class _LiveMixin(_HasJSClass, _HasCSSModule): + # different module from whence nevow.athena and nevow.testutil could + # import it. -exarkun + from nevow.testutil import FakeRequest +- s = StringIO.StringIO() ++ s = io.StringIO() + for _ in _flat.flatten(FakeRequest(), s.write, what, False, False): + pass + return s.getvalue() +@@ -1713,15 +1714,15 @@ class _LiveMixin(_HasJSClass, _HasCSSModule): + del children[0] + + self._structuredCache = { +- u'requiredModules': [(name, flat.flatten(url).decode('utf-8')) ++ 'requiredModules': [(name, flat.flatten(url).decode('utf-8')) + for (name, url) in requiredModules], +- u'requiredCSSModules': [flat.flatten(url).decode('utf-8') ++ 'requiredCSSModules': [flat.flatten(url).decode('utf-8') + for url in requiredCSSModules], +- u'class': self.jsClass, +- u'id': self._athenaID, +- u'initArguments': tuple(self.getInitialArguments()), +- u'markup': markup, +- u'children': children} ++ 'class': self.jsClass, ++ 'id': self._athenaID, ++ 'initArguments': tuple(self.getInitialArguments()), ++ 'markup': markup, ++ 'children': children} + return self._structuredCache + + +@@ -1741,9 +1742,9 @@ class _LiveMixin(_HasJSClass, _HasCSSModule): + # This will only be set if _structured() is being run. 
+ if context.get('children') is not None: + context.get('children').append({ +- u'class': self.jsClass, +- u'id': self._athenaID, +- u'initArguments': self.getInitialArguments()}) ++ 'class': self.jsClass, ++ 'id': self._athenaID, ++ 'initArguments': self.getInitialArguments()}) + context.get('requiredModules').extend(requiredModules) + context.get('requiredCSSModules').extend(requiredCSSModules) + return tag +@@ -1792,7 +1793,7 @@ class _LiveMixin(_HasJSClass, _HasCSSModule): + return self.page.callRemote( + "Nevow.Athena.callByAthenaID", + self._athenaID, +- unicode(methodName, 'ascii'), ++ str(methodName, 'ascii'), + varargs) + + +@@ -1998,7 +1999,7 @@ class IntrospectionFragment(LiveFragment): + the state of a live page. + """ + +- jsClass = u'Nevow.Athena.IntrospectionWidget' ++ jsClass = 'Nevow.Athena.IntrospectionWidget' + + docFactory = loaders.stan( + tags.span(render=tags.directive('liveFragment'))[ +--- nevow/canvas.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/canvas.py +@@ -11,7 +11,7 @@ from nevow.flat import flatten + from nevow.stan import Proto, Tag + from itertools import count + +-cn = count().next ++cn = count().__next__ + cookie = lambda: str(cn()) + + _hookup = {} +@@ -193,7 +193,7 @@ class GroupBase(object): + l[[a(v=x) for x in colors]], + l[[a(v=x) for x in alphas]], + l[[a(v=x) for x in ratios]], +- d[[i(k=k, v=v) for (k, v) in matrix.items()]]) ++ d[[i(k=k, v=v) for (k, v) in list(matrix.items())]]) + + def text(self, text, x, y, height, width): + """Place the given text on the canvas using the given x, y, height and width. 
+@@ -212,7 +212,7 @@ class GroupBase(object): + cook = cookie() + I = Image(cook, self) + self.call('image', cook, where) +- print "IMAGE", where ++ print("IMAGE", where) + return I + + def sound(self, where, stream=True): +@@ -354,18 +354,18 @@ class CanvasSocket(GroupBase): + + def handle_onMouseUp(self, info): + if self.delegate.onMouseUp: +- self.delegate.onMouseUp(self, *map(int, map(float, info.split()))) ++ self.delegate.onMouseUp(self, *list(map(int, list(map(float, info.split()))))) + + def handle_onMouseDown(self, info): + if self.delegate.onMouseDown: +- self.delegate.onMouseDown(self, *map(int, map(float, info.split()))) ++ self.delegate.onMouseDown(self, *list(map(int, list(map(float, info.split()))))) + + def handle_onMouseMove(self, info): + if self.delegate.onMouseMove: +- self.delegate.onMouseMove(self, *map(int, map(float, info.split()))) ++ self.delegate.onMouseMove(self, *list(map(int, list(map(float, info.split()))))) + + def handle_diagnostic(self, info): +- print "Trace", info ++ print("Trace", info) + + canvasServerMessage = loaders.stan(tags.html["This server dispatches for nevow canvas events."]) + +--- nevow/context.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/context.py +@@ -2,8 +2,8 @@ + # Copyright (c) 2004 Divmod. + # See LICENSE for details. + +-from __future__ import generators + ++ + import warnings + + from nevow import stan +@@ -109,7 +109,7 @@ class WebContext(object): + + contextParent = currentContext.parent + if contextParent is None: +- raise KeyError, "Interface %s was not remembered." % key ++ raise KeyError("Interface %s was not remembered." % key) + + currentContext = contextParent + +@@ -151,7 +151,7 @@ class WebContext(object): + if data is not Unset: + return data + if currentContext.parent is None: +- raise KeyError, "Slot named '%s' was not filled." % name ++ raise KeyError("Slot named '%s' was not filled." 
% name) + currentContext = currentContext.parent + + def clone(self, deep=True, cloneTags=True): +--- nevow/dirlist.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/dirlist.py +@@ -5,7 +5,7 @@ + + # system imports + import os +-import urllib ++import urllib.request, urllib.parse, urllib.error + import stat + + # twisted imports +@@ -49,7 +49,7 @@ class DirectoryLister(rend.Page): + files = []; dirs = [] + + for path in directory: +- url = urllib.quote(path, '/') ++ url = urllib.parse.quote(path, '/') + if os.path.isdir(os.path.join(self.path, path)): + url = url + '/' + dirs.append({ +@@ -65,7 +65,7 @@ class DirectoryLister(rend.Page): + self.contentTypes, self.contentEncodings, self.defaultType) + try: + filesize = os.stat(os.path.join(self.path, path))[stat.ST_SIZE] +- except OSError, x: ++ except OSError as x: + if x.errno != 2 and x.errno != 13: + raise x + else: +@@ -80,7 +80,7 @@ class DirectoryLister(rend.Page): + + def data_header(self, context, data): + request = context.locate(inevow.IRequest) +- return "Directory listing for %s" % urllib.unquote(request.uri) ++ return "Directory listing for %s" % urllib.parse.unquote(request.uri) + + def render_tableLink(self, context, data): + return tags.a(href=data['link'])[data['linktext']] +--- nevow/entities.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/entities.py +@@ -10,7 +10,8 @@ import types + + __by_number = {} + +-def makeEntity((name, num, description)): ++def makeEntity(xxx_todo_changeme): ++ (name, num, description) = xxx_todo_changeme + from nevow.stan import Entity + e = Entity(name, num, description) + __by_number[types.IntType(num)] = e +--- nevow/events.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/events.py +@@ -16,7 +16,7 @@ class EventNotification: + Returns a token which should be passed to unsubscribe when done. 
+ """ + if DEBUG: +- print "SUBSCRIBE", self, identifier, subscriber ++ print("SUBSCRIBE", self, identifier, subscriber) + self._subscribers.setdefault(identifier, []).append(subscriber) + return identifier, subscriber + +@@ -24,7 +24,7 @@ class EventNotification: + """Unsubscribe the given token from events. + """ + if DEBUG: +- print "UNSUBSCRIBE", token ++ print("UNSUBSCRIBE", token) + identifier, reference = token + self._subscribers[identifier].remove(reference) + +@@ -32,14 +32,14 @@ class EventNotification: + """Notify the listeners on a given identifier that an event has occurred. + """ + if DEBUG: +- print "PUBLISH", self, identifier, ++ print("PUBLISH", self, identifier, end=' ') + subscribers = self._subscribers.get(identifier, []) + for sub in subscribers: + sub(*args) + if DEBUG: +- print "NOTIFY SUBSCRIBER", sub ++ print("NOTIFY SUBSCRIBER", sub) + if DEBUG: +- print "done" ++ print("done") + + def nextId(self): + self._currentId += 1 +--- nevow/flat/flatstan.py.orig 2016-01-26 23:52:18 UTC ++++ nevow/flat/flatstan.py +@@ -1,10 +1,10 @@ + # Copyright (c) 2004 Divmod. + # See LICENSE for details. + +-from __future__ import generators + +-import urllib, warnings + ++import urllib.request, urllib.parse, urllib.error, warnings ++ + from twisted.python import log, failure + + from nevow import util +@@ -38,7 +38,7 @@ def TagSerializer(original, context, contextIsMine=Fal + visible = bool(original.tagName) + + if visible and context.isAttrib: +- raise RuntimeError, "Tried to render tag '%s' in an tag attribute context." % (original.tagName) ++ raise RuntimeError("Tried to render tag '%s' in an tag attribute context." % (original.tagName)) + + if context.precompile and original.macro: + toBeRenderedBy = original.macro +@@ -53,7 +53,7 @@ def TagSerializer(original, context, contextIsMine=Fal + ## TODO: Do we really need to bypass precompiling for *all* specials? + ## Perhaps just render? 
+ if context.precompile and ( +- [x for x in original._specials.values() ++ [x for x in list(original._specials.values()) + if x is not None and x is not Unset] + or original.slotData): + ## The tags inside this one get a "fresh" parent chain, because +@@ -111,7 +111,7 @@ def TagSerializer(original, context, contextIsMine=Fal + yield '<%s' % original.tagName + if original.attributes: + attribContext = WovenContext(parent=context, precompile=context.precompile, isAttrib=True) +- for (k, v) in sorted(original.attributes.iteritems()): ++ for (k, v) in sorted(original.attributes.items()): + if v is None: + continue + yield ' %s="' % k +@@ -155,7 +155,7 @@ def StringSerializer(original, context): + if context.inURL: + # The magic string "-_.!*'()" also appears in url.py. Thinking about + # changing this? Change that, too. +- return urllib.quote(original, safe="-_.!*'()") ++ return urllib.parse.quote(original, safe="-_.!*'()") + ## quote it + if context.inJS: + original = _jsSingleQuoteQuote(original) +@@ -235,7 +235,7 @@ def FunctionSerializer(original, context, nocontextfun + else: + result = original(context, data) + except StopIteration: +- raise RuntimeError, "User function %r raised StopIteration." % original ++ raise RuntimeError("User function %r raised StopIteration." % original) + return serialize(result, context) + + +--- nevow/guard.py.orig 2016-02-17 12:51:40 UTC ++++ nevow/guard.py +@@ -16,7 +16,7 @@ try: + from hashlib import md5 + except ImportError: + from md5 import md5 +-import StringIO ++import io + + from zope.interface import implements + +@@ -68,7 +68,7 @@ class GuardSession(components.Componentized): + # XXX TODO: need to actually sort avatars by login order! 
+ if len(self.portals) != 1: + raise RuntimeError("Ambiguous request for current avatar.") +- return self.portals.values()[0][0] ++ return list(self.portals.values())[0][0] + + def resourceForPortal(self, port): + return self.portals.get(port) +@@ -86,7 +86,7 @@ class GuardSession(components.Componentized): + raise RuntimeError("Ambiguous request for current avatar.") + self.setResourceForPortal( + rsrc, +- self.portals.keys()[0], ++ list(self.portals.keys())[0], + logout) + + def setResourceForPortal(self, rsrc, port, logout): +@@ -148,7 +148,7 @@ class GuardSession(components.Componentized): + del self.guard.sessions[self.uid] + + # Logout of all portals +- for portal in self.portals.keys(): ++ for portal in list(self.portals.keys()): + self.portalLogout(portal) + + for c in self.expireCallbacks: +@@ -170,7 +170,7 @@ class GuardSession(components.Componentized): + self.checkExpiredID = None + # If I haven't been touched in 15 minutes: + if time.time() - self.lastModified > self.lifetime / 2: +- if self.guard.sessions.has_key(self.uid): ++ if self.uid in self.guard.sessions: + self.expire() + else: + log.msg("no session to expire: %s" % str(self.uid)) +@@ -180,7 +180,7 @@ class GuardSession(components.Componentized): + self.checkExpired) + def __getstate__(self): + d = self.__dict__.copy() +- if d.has_key('checkExpiredID'): ++ if 'checkExpiredID' in d: + del d['checkExpiredID'] + return d + +@@ -196,7 +196,7 @@ def urlToChild(ctx, *ar, **kw): + u = u.child(stan.xml(segment)) + if inevow.IRequest(ctx).method == 'POST': + u = u.clear() +- for k,v in kw.items(): ++ for k,v in list(kw.items()): + u = u.replace(k, v) + + return u +@@ -272,7 +272,8 @@ class SessionWrapper: + def renderHTTP(self, ctx): + request = inevow.IRequest(ctx) + d = defer.maybeDeferred(self._delegate, ctx, []) +- def _cb((resource, segments), ctx): ++ def _cb(xxx_todo_changeme1, ctx): ++ (resource, segments) = xxx_todo_changeme1 + assert not segments + res = inevow.IResource(resource) + return 
res.renderHTTP(ctx) +@@ -425,7 +426,7 @@ class SessionWrapper: + if spoof and hasattr(session, 'args'): + request.args = session.args + request.fields = session.fields +- request.content = StringIO.StringIO() ++ request.content = io.StringIO() + request.content.close() + request.method = session.method + request.requestHeaders = session._requestHeaders +@@ -450,9 +451,10 @@ class SessionWrapper: + + if authCommand == LOGIN_AVATAR: + subSegments = segments[1:] +- def unmangleURL((res,segs)): ++ def unmangleURL(xxx_todo_changeme): + # Tell the session that we just logged in so that it will + # remember form values for us. ++ (res,segs) = xxx_todo_changeme + session.justLoggedIn = True + # Then, generate a redirect back to where we're supposed to be + # by looking at the root of the site and calculating the path +@@ -533,7 +535,8 @@ class SessionWrapper: + self._cbLoginSuccess, session, segments + ) + +- def _cbLoginSuccess(self, (iface, res, logout), session, segments): ++ def _cbLoginSuccess(self, xxx_todo_changeme2, session, segments): ++ (iface, res, logout) = xxx_todo_changeme2 + session.setResourceForPortal(res, self.portal, logout) + return res, segments + +--- nevow/json.py.orig 2016-05-08 19:28:50 UTC ++++ nevow/json.py +@@ -43,12 +43,12 @@ class StringTokenizer(object): + SLASH = "\\" + + IT = iter(s) +- bits = [IT.next()] ++ bits = [next(IT)] + for char in IT: + bits.append(char) + if char == SLASH: + try: +- bits.append(IT.next()) ++ bits.append(next(IT)) + except StopIteration: + return None + if char == '"': +@@ -82,9 +82,9 @@ class WhitespaceToken(object): + + def jsonlong(s): + if 'e' in s: +- m, e = map(long, s.split('e', 1)) ++ m, e = list(map(int, s.split('e', 1))) + else: +- m, e = long(s), 0 ++ m, e = int(s), 0 + return m * 10 ** e + + # list of tuples, the first element is a compiled regular expression the second +@@ -115,7 +115,7 @@ def tokenise(s): + tok, tokstr = action(m.group(0)) + break + else: +- raise ValueError, "Invalid Input, %r" % 
(s[:10],) ++ raise ValueError("Invalid Input, %r" % (s[:10],)) + + if tok is not WhitespaceToken: + tokens.append(tok) +@@ -126,7 +126,7 @@ def tokenise(s): + def accept(want, tokens): + t = tokens.pop(0) + if want != t: +- raise ParseError, "Unexpected %r, %s expected" % (t , want) ++ raise ParseError("Unexpected %r, %s expected" % (t , want)) + + def parseValue(tokens): + if tokens[0] == '{': +@@ -141,28 +141,28 @@ def parseValue(tokens): + if type(tokens[0]) == StringToken: + return parseString(tokens) + +- if type(tokens[0]) in (int, float, long): ++ if type(tokens[0]) in (int, float, int): + return tokens.pop(0), tokens + +- raise ParseError, "Unexpected %r" % tokens[0] ++ raise ParseError("Unexpected %r" % tokens[0]) + + + _stringExpr = re.compile( +- ur'(?:\\x(?P[a-fA-F0-9]{2})) # Match hex-escaped unicode' u'\n' +- ur'|' u'\n' +- ur'(?:\\u(?P[a-fA-F0-9]{4})) # Match hex-escaped high unicode' u'\n' +- ur'|' u'\n' +- ur'(?P\\[fbntr\\"]) # Match escaped control characters' u'\n', ++ r'(?:\\x(?P[a-fA-F0-9]{2})) # Match hex-escaped unicode' '\n' ++ r'|' '\n' ++ r'(?:\\u(?P[a-fA-F0-9]{4})) # Match hex-escaped high unicode' '\n' ++ r'|' '\n' ++ r'(?P\\[fbntr\\"]) # Match escaped control characters' '\n', + re.VERBOSE) + + _controlMap = { +- u'\\f': u'\f', +- u'\\b': u'\b', +- u'\\n': u'\n', +- u'\\t': u'\t', +- u'\\r': u'\r', +- u'\\"': u'"', +- u'\\\\': u'\\', ++ '\\f': '\f', ++ '\\b': '\b', ++ '\\n': '\n', ++ '\\t': '\t', ++ '\\r': '\r', ++ '\\"': '"', ++ '\\\\': '\\', + } + + def _stringSub(m): +@@ -170,14 +170,14 @@ def _stringSub(m): + if u is None: + u = m.group('unicode2') + if u is not None: +- return unichr(int(u, 16)) ++ return chr(int(u, 16)) + c = m.group('control') + return _controlMap[c] + + + def parseString(tokens): + if type(tokens[0]) is not StringToken: +- raise ParseError, "Unexpected %r" % tokens[0] ++ raise ParseError("Unexpected %r" % tokens[0]) + s = _stringExpr.sub(_stringSub, tokens.pop(0)[1:-1].decode('utf-8')) + return s, tokens + +@@ 
-229,27 +229,27 @@ def parse(s): + tokens = tokenise(s) + value, tokens = parseValue(tokens) + if tokens: +- raise ParseError, "Unexpected %r" % tokens[0] ++ raise ParseError("Unexpected %r" % tokens[0]) + return value + + class CycleError(Exception): + pass + +-_translation = dict([(o, u'\\x%02x' % (o,)) for o in range(0x20)]) ++_translation = dict([(o, '\\x%02x' % (o,)) for o in range(0x20)]) + + # Characters which cannot appear as literals in the output + _translation.update({ +- ord(u'\\'): u'\\\\', +- ord(u'"'): ur'\"', +- ord(u'\f'): ur'\f', +- ord(u'\b'): ur'\b', +- ord(u'\n'): ur'\n', +- ord(u'\t'): ur'\t', +- ord(u'\r'): ur'\r', ++ ord('\\'): '\\\\', ++ ord('"'): r'\"', ++ ord('\f'): r'\f', ++ ord('\b'): r'\b', ++ ord('\n'): r'\n', ++ ord('\t'): r'\t', ++ ord('\r'): r'\r', + # The next two are sneaky, see + # http://timelessrepo.com/json-isnt-a-javascript-subset +- ord(u'\u2028'): u'\\u2028', +- ord(u'\u2029'): u'\\u2029', ++ ord('\u2028'): '\\u2028', ++ ord('\u2029'): '\\u2029', + }) + + def stringEncode(s): +@@ -259,18 +259,18 @@ def stringEncode(s): + def _serialize(obj, w, seen): + from nevow import athena + +- if isinstance(obj, types.BooleanType): ++ if isinstance(obj, bool): + if obj: + w('true') + else: + w('false') +- elif isinstance(obj, (int, long, float)): ++ elif isinstance(obj, (int, float)): + w(str(obj)) +- elif isinstance(obj, unicode): ++ elif isinstance(obj, str): + w('"') + w(stringEncode(obj)) + w('"') +- elif isinstance(obj, types.NoneType): ++ elif isinstance(obj, type(None)): + w('null') + elif id(obj) in seen: + raise CycleError(type(obj)) +@@ -283,7 +283,7 @@ def _serialize(obj, w, seen): + w(']') + elif isinstance(obj, dict): + w('{') +- for n, (k, v) in enumerate(obj.iteritems()): ++ for n, (k, v) in enumerate(obj.items()): + _serialize(k, w, seen) + w(':') + _serialize(v, w, seen) +--- nevow/livetrial/testcase.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/livetrial/testcase.py +@@ -62,7 +62,7 @@ class TestCase(athena.LiveFragment, 
unittest.TestCase) + + Subclasses may want to override this. + """ +- return u'' ++ return '' + + + def head(self): +@@ -150,11 +150,11 @@ class TestLoader(runner.TestLoader): + + + def loadMethod(self, method): +- raise NotImplementedError, 'livetests must be classes' ++ raise NotImplementedError('livetests must be classes') + + + def loadClass(self, klass): +- if not (isinstance(klass, type) or isinstance(klass, types.ClassType)): ++ if not (isinstance(klass, type) or isinstance(klass, type)): + raise TypeError("%r is not a class" % (klass,)) + if not self.isTestCase(klass): + raise ValueError("%r is not a test case" % (klass,)) +@@ -171,7 +171,7 @@ class TestLoader(runner.TestLoader): + + + def isTestCase(self, obj): +- return isinstance(obj, (type, types.ClassType)) and issubclass(obj, TestCase) and obj is not TestCase ++ return isinstance(obj, type) and issubclass(obj, TestCase) and obj is not TestCase + + + def _findTestClasses(self, module): +--- nevow/query.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/query.py +@@ -47,7 +47,7 @@ class QueryList(tpc.Adapter): + yield x.clone(deep=False, clearPattern=True) + + if default is None: +- raise stan.NodeNotFound, ("pattern", pattern) ++ raise stan.NodeNotFound("pattern", pattern) + if hasattr(default, 'clone'): + while True: yield default.clone(deep=False) + else: +@@ -86,13 +86,13 @@ class QuerySlot(QueryList): + + class QueryNeverFind(tpc.Adapter): + def patternGenerator(self, pattern, default=None): +- raise stan.NodeNotFound, ('pattern', pattern) ++ raise stan.NodeNotFound('pattern', pattern) + + def allPatterns(self, pattern): + return [] + + def onePattern(self, pattern): +- raise stan.NodeNotFound, ('pattern', pattern) ++ raise stan.NodeNotFound('pattern', pattern) + + def _locatePatterns(self, pattern, default, loop=True): + return [] +--- nevow/rend.py.orig 2016-02-17 12:51:40 UTC ++++ nevow/rend.py +@@ -18,7 +18,7 @@ Mostly, you'll use the renderers: + """ + + from time import time as now +-from cStringIO 
import StringIO ++from io import StringIO + import random + import warnings + +@@ -298,7 +298,7 @@ class ConfigurableFactory: + ... + ... docFactory = stan(render_forms). + """ +- if filter(lambda x: issubclass(x, annotate.TypedInterface), providedBy(self)): ++ if [x for x in providedBy(self) if issubclass(x, annotate.TypedInterface)]: + warnings.warn("[0.5] Subclassing TypedInterface to declare annotations is deprecated. Please provide bind_* methods on your Page or Fragment subclass instead.", DeprecationWarning) + from formless import configurable + return configurable.TypedInterfaceConfigurable(self) +@@ -329,7 +329,7 @@ def defaultsFactory(ctx): + defaults = webform.FormDefaults() + if co is not None: + e = iformless.IFormErrors(co, {}) +- for k, v in e.items(): ++ for k, v in list(e.items()): + defaults.getAllDefaults(k).update(v.partialForm) + return defaults + +@@ -341,7 +341,7 @@ def errorsFactory(ctx): + errs = webform.FormErrors() + if co is not None: + e = iformless.IFormErrors(co, {}) +- for k, v in e.items(): ++ for k, v in list(e.items()): + errs.updateErrors(k, v.errors) + errs.setError(k, v.formErrorMessage) + return errs +@@ -408,7 +408,7 @@ class Fragment(DataFactory, RenderFactory, MacroFactor + finally: + self.docFactory.pattern = old + self.docFactory.precompiledDoc = None +- except TypeError, e: ++ except TypeError as e: + # Avert your eyes now! I don't want to catch anything but IQ + # adaption exceptions here but all I get is TypeError. 
This whole + # section of code is a complete hack anyway so one more won't +@@ -546,7 +546,7 @@ class Page(Fragment, ConfigurableFactory, ChildLookupM + + def finishRequest(): + carryover = request.args.get('_nevow_carryover_', [None])[0] +- if carryover is not None and _CARRYOVER.has_key(carryover): ++ if carryover is not None and carryover in _CARRYOVER: + del _CARRYOVER[carryover] + if self.afterRender is not None: + return util.maybeDeferred(self.afterRender,ctx) +@@ -668,7 +668,7 @@ class Page(Fragment, ConfigurableFactory, ChildLookupM + magicCookie = '%s%s%s' % (now(),request.getClientIP(),random.random()) + refpath = refpath.replace('_nevow_carryover_', magicCookie) + _CARRYOVER[magicCookie] = C = tpc.Componentized() +- for k, v in aspects.iteritems(): ++ for k, v in aspects.items(): + C.setComponent(k, v) + + destination = flat.flatten(refpath, ctx) +@@ -768,7 +768,7 @@ def mapping(context, data): + + + """ +- for k, v in data.items(): ++ for k, v in list(data.items()): + context.fillSlots(k, v) + return context.tag + +@@ -799,7 +799,7 @@ class FourOhFour: + # Look for an application-remembered handler + try: + notFoundHandler = ctx.locate(inevow.ICanHandleNotFound) +- except KeyError, e: ++ except KeyError as e: + return self.notFound + # Call the application-remembered handler but if there are any errors + # then log it and fallback to the standard message. 
+@@ -809,7 +809,7 @@ class FourOhFour: + log.err() + return self.notFound + +- def __nonzero__(self): ++ def __bool__(self): + return False + + +--- nevow/scripts/nit.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/scripts/nit.py +@@ -50,7 +50,7 @@ def run(): + config = NitOptions() + try: + config.parseOptions() +- except UsageError, ue: ++ except UsageError as ue: + raise SystemExit("%s: %s" % (sys.argv[0], ue)) + else: + if not config['testmodules']: +--- nevow/scripts/xmlgettext.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/scripts/xmlgettext.py +@@ -1,5 +1,5 @@ + from xml.dom import pulldom +-from cStringIO import StringIO ++from io import StringIO + from twisted.python import usage + import nevow + +@@ -38,14 +38,14 @@ class LineBasedStream(object): + + def getMsgID(node): + out = StringIO() +- print >>out, 'msgid ""' ++ print('msgid ""', file=out) + for child in node.childNodes: + s = child.toxml('utf-8') + s = s.replace('\\', '\\\\') + s = s.replace('"', '\\"') + s = s.replace('\n', '\\n') +- print >>out, '"%s"' % s +- print >>out, 'msgstr ""' ++ print('"%s"' % s, file=out) ++ print('msgstr ""', file=out) + return out.getvalue() + + def process(filename, messages): +@@ -67,14 +67,14 @@ def process(filename, messages): + + + def report(messages): +- for msgid, locations in messages.items(): ++ for msgid, locations in list(messages.items()): + for line in locations: +- print line +- print msgid ++ print(line) ++ print(msgid) + + class GettextOptions(usage.Options): + def opt_version(self): +- print 'Nevow version:', nevow.__version__ ++ print('Nevow version:', nevow.__version__) + usage.Options.opt_version(self) + + def parseArgs(self, *files): +--- nevow/stan.py.orig 2016-01-26 23:52:18 UTC ++++ nevow/stan.py +@@ -21,7 +21,7 @@ code. See nevow.stan.Tag for details, and nevow.tags f + prototypes for all of the XHTML element types. 
+ """ + +-from __future__ import generators ++ + from zope.interface import implements + + from nevow import inevow +@@ -147,7 +147,7 @@ class slot(object): + """Prevent an infinite loop if someone tries to do + for x in slot('foo'): + """ +- raise NotImplementedError, "Stan slot instances are not iterable." ++ raise NotImplementedError("Stan slot instances are not iterable.") + + + +@@ -362,11 +362,11 @@ class Tag(object): + return self + + for name in self.specials: +- if kw.has_key(name): ++ if name in kw: + setattr(self, name, kw[name]) + del kw[name] + +- for k, v in kw.iteritems(): ++ for k, v in kw.items(): + if k[-1] == '_': + k = k[:-1] + elif k[0] == '_': +@@ -403,7 +403,7 @@ class Tag(object): + """Prevent an infinite loop if someone tries to do + for x in stantaginstance: + """ +- raise NotImplementedError, "Stan tag instances are not iterable." ++ raise NotImplementedError("Stan tag instances are not iterable.") + + def _clearSpecials(self): + """Clears all the specials in this tag. For use by flatstan. 
+@@ -496,7 +496,7 @@ class Tag(object): + + + class UnsetClass: +- def __nonzero__(self): ++ def __bool__(self): + return False + def __repr__(self): + return "Unset" +@@ -546,18 +546,18 @@ class PatternTag(object): + through a sequence of matching patterns.''' + + def __init__(self, patterner): +- self.pat = patterner.next() ++ self.pat = next(patterner) + self.patterner = patterner + +- def next(self): ++ def __next__(self): + if self.pat: + p, self.pat = self.pat, None + return p +- return self.patterner.next() ++ return next(self.patterner) + + + def makeForwarder(name): +- return lambda self, *args, **kw: getattr(self.next(), name)(*args, **kw) ++ return lambda self, *args, **kw: getattr(next(self), name)(*args, **kw) + + for forward in ['__call__', '__getitem__', 'fillSlots']: + setattr(PatternTag, forward, makeForwarder(forward)) +@@ -591,7 +591,7 @@ def _locatePatterns(tag, pattern, default, loop=True): + yield cloned + + if default is None: +- raise NodeNotFound, ("pattern", pattern) ++ raise NodeNotFound("pattern", pattern) + if hasattr(default, 'clone'): + while True: yield default.clone(deep=False) + else: +--- nevow/static.py.orig 2015-10-20 22:44:09 UTC ++++ nevow/static.py +@@ -7,7 +7,7 @@ + + # System Imports + import os, string, time +-import cStringIO ++import io + import traceback + import warnings + StringIO = cStringIO +@@ -144,7 +144,7 @@ def loadMimeTypes(mimetype_locations=['/etc/mime.types + def getTypeAndEncoding(filename, types, encodings, defaultType): + p, ext = os.path.splitext(filename) + ext = ext.lower() +- if encodings.has_key(ext): ++ if ext in encodings: + enc = encodings[ext] + ext = os.path.splitext(p)[1].lower() + else: +@@ -300,7 +300,7 @@ class File: + + try: + f = self.openForReading() +- except IOError, e: ++ except IOError as e: + import errno + if e[0] == errno.EACCES: + return ForbiddenResource().render(request) +--- nevow/test/test_athena.py.orig 2016-02-17 12:51:40 UTC ++++ nevow/test/test_athena.py +@@ -1,6 +1,6 @@ + 
+ import os, sets +-from itertools import izip ++ + from xml.dom.minidom import parseString + + from twisted.trial import unittest +@@ -35,7 +35,7 @@ class MappingResourceTests(unittest.TestCase): + L{athena.MappingResource} isn't directly renderable. + """ + m = athena.MappingResource({}) +- self.failUnless(isinstance(m.renderHTTP(None), rend.FourOhFour)) ++ self.assertTrue(isinstance(m.renderHTTP(None), rend.FourOhFour)) + + + def test_lookupNonExistentKey(self): +@@ -44,7 +44,7 @@ class MappingResourceTests(unittest.TestCase): + for a non-existent key. + """ + m = athena.MappingResource({'name': 'value'}) +- self.assertEquals(m.locateChild(None, ('key',)), rend.NotFound) ++ self.assertEqual(m.locateChild(None, ('key',)), rend.NotFound) + + + def test_lookupKey(self): +@@ -55,8 +55,8 @@ class MappingResourceTests(unittest.TestCase): + m = athena.MappingResource({'name': 'value'}) + m.resourceFactory = sets.Set + resource, segments = m.locateChild(None, ('name',)) +- self.assertEquals(segments, []) +- self.assertEquals(resource, sets.Set('value')) ++ self.assertEqual(segments, []) ++ self.assertEqual(resource, sets.Set('value')) + + + +@@ -68,7 +68,7 @@ class ModuleRegistryTestMixin: + """ + C{getModuleForName} should return the right kind of module. + """ +- moduleName = u'test_getModuleForName' ++ moduleName = 'test_getModuleForName' + mapping = {moduleName: self.mktemp()} + reg = self.registryClass(mapping) + mod = reg.getModuleForName(moduleName) +@@ -82,7 +82,7 @@ class ModuleRegistryTestMixin: + C{getModuleForName} should get angry if we ask for a module which + doesn't exist. + """ +- moduleName = u'test_getModuleForName' ++ moduleName = 'test_getModuleForName' + reg = self.registryClass({}) + self.assertRaises( + RuntimeError, +@@ -104,7 +104,7 @@ class CSSRegistryTests(unittest.TestCase, ModuleRegist + L{athena.CSSRegistry} should initialize its mapping from + L{athena.allCSSPackages} as needed. 
+ """ +- moduleName = u'test_getModuleForNameLoad' ++ moduleName = 'test_getModuleForNameLoad' + origAllCSSPackages = athena.allCSSPackages + theCSSPackages = {moduleName: self.mktemp()} + athena.allCSSPackages = lambda: theCSSPackages +@@ -131,7 +131,7 @@ class JSDependenciesTests(unittest.TestCase, ModuleReg + L{athena.JSDependencies} should initialize its mapping from + L{athena.allCSSPackages} as needed. + """ +- moduleName = u'test_getModuleForNameLoad' ++ moduleName = 'test_getModuleForNameLoad' + origAllJavascriptPackages = athena.allJavascriptPackages + theJavascriptPackages = {moduleName: self.mktemp()} + athena.allJavascriptPackages = lambda: theJavascriptPackages +@@ -180,7 +180,7 @@ the end + m2 = self.moduleClass.getOrCreate('testmodule', modules) + + self.assertTrue(isinstance(m1, self.moduleClass)) +- self.assertEquals(m1.name, 'testmodule') ++ self.assertEqual(m1.name, 'testmodule') + + self.assertIdentical(m1, m2) + +@@ -219,7 +219,7 @@ the end + m = self.moduleClass.getOrCreate('testmodule', modules) + deps = [d.name for d in m.dependencies()] + deps.sort() +- self.assertEquals(deps, ['Another', 'ExampleModule', 'Module']) ++ self.assertEqual(deps, ['Another', 'ExampleModule', 'Module']) + + + def test_allDependencies(self): +@@ -246,7 +246,7 @@ the end + # that depends upon them. 
+ self.assertIn(d, allDeps) + self.assertIn(depMod, allDeps) +- self.failUnless(allDeps.index(d) < allDeps.index(depMod)) ++ self.assertTrue(allDeps.index(d) < allDeps.index(depMod)) + + + def test_crlfNewlines(self): +@@ -268,7 +268,7 @@ the end + module = self.moduleClass('Foo', modules) + fooDependencies = list(module.dependencies()) + self.assertEqual(len(fooDependencies), 1) +- self.assertEqual(fooDependencies[0].name, u'Bar') ++ self.assertEqual(fooDependencies[0].name, 'Bar') + + + def test_dependencyCaching(self): +@@ -290,15 +290,15 @@ the end + m._extractImports = _extractImports + + list(m.dependencies()) +- self.assertEquals(m.extractCounter, 1) ++ self.assertEqual(m.extractCounter, 1) + + list(m.dependencies()) +- self.assertEquals(m.extractCounter, 1) ++ self.assertEqual(m.extractCounter, 1) + + newTime = m.lastModified + os.utime(testModuleFilename, (newTime + 1, newTime + 1)) + list(m.dependencies()) +- self.assertEquals(m.extractCounter, 2) ++ self.assertEqual(m.extractCounter, 2) + + + def test_packageDependencies(self): +@@ -306,11 +306,11 @@ the end + L{athena.AthenaModule} should include a module's package in its + dependencies. + """ +- modules = {u'Foo': self.mktemp(), u'Foo.Bar': self.mktemp()} +- file(modules[u'Foo'], 'wb').close() +- file(modules[u'Foo.Bar'], 'wb').close() +- foo = self.moduleClass.getOrCreate(u'Foo', modules) +- bar = self.moduleClass.getOrCreate(u'Foo.Bar', modules) ++ modules = {'Foo': self.mktemp(), 'Foo.Bar': self.mktemp()} ++ file(modules['Foo'], 'wb').close() ++ file(modules['Foo.Bar'], 'wb').close() ++ foo = self.moduleClass.getOrCreate('Foo', modules) ++ bar = self.moduleClass.getOrCreate('Foo.Bar', modules) + self.assertIn(foo, bar.allDependencies()) + + +@@ -318,7 +318,7 @@ the end + """ + L{athena.AthenaModule} should C{repr} to something helpful. 
+ """ +- moduleName = u'Foo.Bar' ++ moduleName = 'Foo.Bar' + module = self.moduleClass( + moduleName, {moduleName: self.mktemp()}) + self.assertEqual( +@@ -445,11 +445,11 @@ class ModuleInteractionTests(unittest.TestCase): + namespaces. + """ + cssModule = athena.CSSModule.getOrCreate( +- u'test_separateModuleNamespace', +- {u'test_separateModuleNamespace': self.mktemp()}) ++ 'test_separateModuleNamespace', ++ {'test_separateModuleNamespace': self.mktemp()}) + jsModule = athena.JSModule.getOrCreate( +- u'test_separateModuleNamespace', +- {u'test_separateModuleNamespace': self.mktemp()}) ++ 'test_separateModuleNamespace', ++ {'test_separateModuleNamespace': self.mktemp()}) + self.assertNotIdentical(cssModule, jsModule) + self.assertTrue(isinstance(cssModule, athena.CSSModule)) + self.assertTrue(isinstance(jsModule, athena.JSModule)) +@@ -477,10 +477,10 @@ class _AutoPackageTestMixin: + return path + + expected = { +- u'Foo': childPath('Foo', '__init__.' + self.moduleExtension), +- u'Foo.Bar': childPath('Foo', 'Bar.' + self.moduleExtension), +- u'Foo.Baz': util.sibpath(athena.__file__, 'empty-module.' + self.moduleExtension), +- u'Foo.Baz.Quux': childPath('Foo', 'Baz', 'Quux.' + self.moduleExtension)} ++ 'Foo': childPath('Foo', '__init__.' + self.moduleExtension), ++ 'Foo.Bar': childPath('Foo', 'Bar.' + self.moduleExtension), ++ 'Foo.Baz': util.sibpath(athena.__file__, 'empty-module.' + self.moduleExtension), ++ 'Foo.Baz.Quux': childPath('Foo', 'Baz', 'Quux.' + self.moduleExtension)} + + childPath('Foo', '.foo.' 
+ self.moduleExtension) + os.mkdir(os.path.join(packageDir, 'Foo', '.test')) +@@ -489,10 +489,10 @@ class _AutoPackageTestMixin: + childPath('Foo', 'Zot.other') + + package = self.packageFactory(packageDir) +- for module, path in expected.iteritems(): ++ for module, path in expected.items(): + m = package.mapping.pop(module) +- self.assertEquals(m, path) +- self.assertEquals(package.mapping, {}) ++ self.assertEqual(m, path) ++ self.assertEqual(package.mapping, {}) + + + +@@ -558,7 +558,7 @@ class UtilitiesTests(unittest.TestCase): + tag = tags.span[athena.handler(event='onclick', handler='bar')] + mutated = athena._rewriteEventHandlerToAttribute(tag) + output = flat.flatten(mutated) +- self.assertEquals( ++ self.assertEqual( + output, + '') + +@@ -569,7 +569,7 @@ class UtilitiesTests(unittest.TestCase): + macro is. + """ + tag = ["hello", " ", "world"] +- self.assertEquals( ++ self.assertEqual( + athena._rewriteEventHandlerToAttribute(tag), + tag) + +@@ -618,7 +618,7 @@ class UtilitiesTests(unittest.TestCase): + renderDeferred = renderPage(page) + def rendered(result): + page.action_close(None) +- self.assertEquals(preprocessed, [[tag]]) ++ self.assertEqual(preprocessed, [[tag]]) + renderDeferred.addCallback(rendered) + return renderDeferred + +@@ -683,7 +683,7 @@ class StandardLibraryTestCase(unittest.TestCase): + def _importTest(self, moduleName): + mod = self.deps.getModuleForName(moduleName) + inspect = [dep for dep in mod.allDependencies() if dep.name == moduleName] +- self.failUnless(inspect) ++ self.assertTrue(inspect) + + + def test_divmodImport(self): +@@ -777,14 +777,14 @@ class Nesting(unittest.TestCase): + tf1.setFragmentParent(lp) + tf2.setFragmentParent(tf1) + +- self.assertEquals(lp.liveFragmentChildren, [tf1]) +- self.assertEquals(tf1.liveFragmentChildren, [tf2]) +- self.assertEquals(tf2.liveFragmentChildren, []) +- self.assertEquals(tf2.fragmentParent, tf1) +- self.assertEquals(tf1.fragmentParent, lp) ++ self.assertEqual(lp.liveFragmentChildren, 
[tf1]) ++ self.assertEqual(tf1.liveFragmentChildren, [tf2]) ++ self.assertEqual(tf2.liveFragmentChildren, []) ++ self.assertEqual(tf2.fragmentParent, tf1) ++ self.assertEqual(tf1.fragmentParent, lp) + +- self.assertEquals(tf2.page, lp) +- self.assertEquals(tf1.page, lp) ++ self.assertEqual(tf2.page, lp) ++ self.assertEqual(tf1.page, lp) + + + def testInsideOutFragmentNesting(self): +@@ -799,13 +799,13 @@ class Nesting(unittest.TestCase): + innerFragment.setFragmentParent(outerFragment) + outerFragment.setFragmentParent(page) + +- self.assertEquals(page.liveFragmentChildren, [outerFragment]) +- self.assertEquals(outerFragment.fragmentParent, page) +- self.assertEquals(outerFragment.page, page) ++ self.assertEqual(page.liveFragmentChildren, [outerFragment]) ++ self.assertEqual(outerFragment.fragmentParent, page) ++ self.assertEqual(outerFragment.page, page) + +- self.assertEquals(outerFragment.liveFragmentChildren, [innerFragment]) +- self.assertEquals(innerFragment.fragmentParent, outerFragment) +- self.assertEquals(innerFragment.page, page) ++ self.assertEqual(outerFragment.liveFragmentChildren, [innerFragment]) ++ self.assertEqual(innerFragment.fragmentParent, outerFragment) ++ self.assertEqual(innerFragment.page, page) + + + +@@ -816,14 +816,14 @@ class Tracebacks(unittest.TestCase): + + stack = '\n'.join(['%s@%s:%d' % frame for frame in frames]) + +- exc = {u'name': 'SomeError', +- u'message': 'An error occurred.', +- u'stack': stack} ++ exc = {'name': 'SomeError', ++ 'message': 'An error occurred.', ++ 'stack': stack} + + def testStackParsing(self): + p = athena.parseStack(self.stack) +- for iframe, oframe in izip(self.frames[::-1], p): +- self.assertEquals(oframe, iframe) ++ for iframe, oframe in zip(self.frames[::-1], p): ++ self.assertEqual(oframe, iframe) + + def testStackLengthAndOrder(self): + f = athena.getJSFailure(self.exc, {}) +@@ -842,7 +842,8 @@ class _DelayedCall(object): + + + def mappend(transport): +- def send((ack, messages)): ++ def 
send(xxx_todo_changeme): ++ (ack, messages) = xxx_todo_changeme + transport.append(messages[:]) + return send + +@@ -900,9 +901,9 @@ class Transport(unittest.TestCase): + """ + self.rdm.addOutput(mappend(self.transport)) + self.rdm.addMessage(self.theMessage) +- self.assertEquals(self.transport, [[(0, self.theMessage)]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage)]]) + self.rdm.addMessage(self.theMessage) +- self.assertEquals(self.transport, [[(0, self.theMessage)]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage)]]) + + + def testSendMessageQueued(self): +@@ -912,7 +913,7 @@ class Transport(unittest.TestCase): + """ + self.rdm.addMessage(self.theMessage) + self.rdm.addOutput(mappend(self.transport)) +- self.assertEquals(self.transport, [[(0, self.theMessage)]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage)]]) + + + def testMultipleQueuedMessages(self): +@@ -923,7 +924,7 @@ class Transport(unittest.TestCase): + self.rdm.addMessage(self.theMessage) + self.rdm.addMessage(self.theMessage.encode('hex')) + self.rdm.addOutput(mappend(self.transport)) +- self.assertEquals(self.transport, [[(0, self.theMessage), (1, self.theMessage.encode('hex'))]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage), (1, self.theMessage.encode('hex'))]]) + + + def testMultipleQueuedOutputs(self): +@@ -935,8 +936,8 @@ class Transport(unittest.TestCase): + self.rdm.addOutput(mappend(self.transport)) + self.rdm.addOutput(mappend(secondTransport)) + self.rdm.addMessage(self.theMessage) +- self.assertEquals(self.transport, [[(0, self.theMessage)]]) +- self.assertEquals(secondTransport, []) ++ self.assertEqual(self.transport, [[(0, self.theMessage)]]) ++ self.assertEqual(secondTransport, []) + + + def testMessageRedelivery(self): +@@ -951,15 +952,15 @@ class Transport(unittest.TestCase): + self.rdm.addMessage(self.theMessage) + self.rdm.addMessage(secondMessage) + self.rdm.addOutput(mappend(self.transport)) +- self.assertEquals(self.transport, [[(0, 
self.theMessage), (1, secondMessage)]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage), (1, secondMessage)]]) + self.rdm.addOutput(mappend(secondTransport)) +- self.assertEquals(secondTransport, [[(0, self.theMessage), (1, secondMessage)]]) ++ self.assertEqual(secondTransport, [[(0, self.theMessage), (1, secondMessage)]]) + self.rdm.basketCaseReceived(None, [0, []]) + self.rdm.addOutput(mappend(thirdTransport)) +- self.assertEquals(thirdTransport, [[(1, secondMessage)]]) ++ self.assertEqual(thirdTransport, [[(1, secondMessage)]]) + self.rdm.basketCaseReceived(None, [1, []]) + self.rdm.addOutput(mappend(fourthTransport)) +- self.assertEquals(fourthTransport, []) ++ self.assertEqual(fourthTransport, []) + + + def testConnectTimeout(self): +@@ -969,15 +970,15 @@ class Transport(unittest.TestCase): + established. + """ + n, f, a, kw = self.scheduled.pop() +- self.failIf(self.scheduled, "Too many tasks scheduled.") ++ self.assertFalse(self.scheduled, "Too many tasks scheduled.") + +- self.assertEquals(n, self.connectTimeout) ++ self.assertEqual(n, self.connectTimeout) + f(*a, **kw) + +- self.assertEquals(len(self.events), 1) ++ self.assertEqual(len(self.events), 1) + self.events[0].trap(athena.ConnectFailed) + +- self.failIf(self.scheduled, "Unexpected task scheduled after connect failed.") ++ self.assertFalse(self.scheduled, "Unexpected task scheduled after connect failed.") + + + def testConnectSucceeds(self): +@@ -985,12 +986,12 @@ class Transport(unittest.TestCase): + Test that the connection timeout is cancelled when an output channel is + added. 
+ """ +- self.failUnless(self.scheduled, "No connect timeout scheduled.") # Sanity check ++ self.assertTrue(self.scheduled, "No connect timeout scheduled.") # Sanity check + self.rdm.addOutput(mappend(self.transport)) + n, f, a, kw = self.scheduled.pop() +- self.assertEquals(n, self.idleTimeout) +- self.failIf(self.scheduled, "Output channel added but there is still a task pending.") +- self.assertEquals(self.transport, [], "Received unexpected output.") ++ self.assertEqual(n, self.idleTimeout) ++ self.assertFalse(self.scheduled, "Output channel added but there is still a task pending.") ++ self.assertEqual(self.transport, [], "Received unexpected output.") + + + def test_connectionMade(self): +@@ -1016,15 +1017,15 @@ class Transport(unittest.TestCase): + self.rdm.addOutput(mappend(self.transport)) + + n, f, a, kw = self.scheduled.pop() +- self.failIf(self.scheduled, "Too many tasks scheduled.") ++ self.assertFalse(self.scheduled, "Too many tasks scheduled.") + +- self.assertEquals(n, self.transportlessTimeout) ++ self.assertEqual(n, self.transportlessTimeout) + f(*a, **kw) + +- self.assertEquals(len(self.events), 1) ++ self.assertEqual(len(self.events), 1) + self.events[0].trap(athena.ConnectionLost) + +- self.failIf(self.scheduled, "Unexpected task scheduled after connection lost.") ++ self.assertFalse(self.scheduled, "Unexpected task scheduled after connection lost.") + + + def testMessageConsumedOutputTimeout(self): +@@ -1037,15 +1038,15 @@ class Transport(unittest.TestCase): + self.rdm.addMessage(self.theMessage) + + n, f, a, kw = self.scheduled.pop() +- self.failIf(self.scheduled, "Too many tasks scheduled.") ++ self.assertFalse(self.scheduled, "Too many tasks scheduled.") + +- self.assertEquals(n, self.transportlessTimeout) ++ self.assertEqual(n, self.transportlessTimeout) + f(*a, **kw) + +- self.assertEquals(len(self.events), 1) ++ self.assertEqual(len(self.events), 1) + self.events[0].trap(athena.ConnectionLost) + +- self.failIf(self.scheduled, "Unexpected 
task scheduled after connection lost.") ++ self.assertFalse(self.scheduled, "Unexpected task scheduled after connection lost.") + + + def testOutputConnectionAdded(self): +@@ -1056,17 +1057,17 @@ class Transport(unittest.TestCase): + self.rdm.addMessage(self.theMessage) + self.rdm.addOutput(mappend(self.transport)) + +- self.assertEquals(len(self.scheduled), 1, "Transportless timeout not created.") ++ self.assertEqual(len(self.scheduled), 1, "Transportless timeout not created.") + n, f, a, kw = self.scheduled[0] +- self.assertEquals(n, self.transportlessTimeout, "Unexpected task still scheduled after output added.") ++ self.assertEqual(n, self.transportlessTimeout, "Unexpected task still scheduled after output added.") + + self.rdm.basketCaseReceived(None, [0, []]) + + n, f, a, kw = self.scheduled.pop() +- self.assertEquals(n, self.idleTimeout) ++ self.assertEqual(n, self.idleTimeout) + +- self.failIf(self.scheduled, "Unexpected task still scheduled after output added.") +- self.failIf(self.events, "Unexpectedly received some kind of event.") ++ self.assertFalse(self.scheduled, "Unexpected task still scheduled after output added.") ++ self.assertFalse(self.events, "Unexpectedly received some kind of event.") + + + def testIdleOutputTimeout(self): +@@ -1077,12 +1078,12 @@ class Transport(unittest.TestCase): + self.rdm.addOutput(mappend(self.transport)) + + n, f, a, kw = self.scheduled.pop() +- self.assertEquals(n, self.idleTimeout) +- self.failIf(self.scheduled, "Unexpected tasks still scheduled in addition to idle timeout task.") ++ self.assertEqual(n, self.idleTimeout) ++ self.assertFalse(self.scheduled, "Unexpected tasks still scheduled in addition to idle timeout task.") + + f(*a, **kw) + +- self.assertEquals(self.transport, [[]]) ++ self.assertEqual(self.transport, [[]]) + + + def testIdleTimeoutStartsOutputlessTimeout(self): +@@ -1093,16 +1094,16 @@ class Transport(unittest.TestCase): + self.rdm.addOutput(mappend(self.transport)) + + n, f, a, kw = 
self.scheduled.pop() +- self.assertEquals(n, self.idleTimeout) ++ self.assertEqual(n, self.idleTimeout) + f(*a, **kw) + +- self.failIf(self.events, "Unexpectedly received some events.") ++ self.assertFalse(self.events, "Unexpectedly received some events.") + + n, f, a, kw = self.scheduled.pop() +- self.assertEquals(n, self.transportlessTimeout) ++ self.assertEqual(n, self.transportlessTimeout) + f(*a, **kw) + +- self.assertEquals(len(self.events), 1) ++ self.assertEqual(len(self.events), 1) + self.events[0].trap(athena.ConnectionLost) + + +@@ -1116,15 +1117,15 @@ class Transport(unittest.TestCase): + + # The connection timeout should have been cancelled and + # replaced with an idle timeout. +- self.assertEquals(len(self.scheduled), 1) ++ self.assertEqual(len(self.scheduled), 1) + n, f, a, kw = self.scheduled[0] +- self.assertEquals(n, self.idleTimeout) ++ self.assertEqual(n, self.idleTimeout) + + self.rdm.addMessage(self.theMessage) +- self.assertEquals(self.transport, []) ++ self.assertEqual(self.transport, []) + + self.rdm.unpause() +- self.assertEquals(self.transport, [[(0, self.theMessage)]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage)]]) + + + def testTransportlessPause(self): +@@ -1137,10 +1138,10 @@ class Transport(unittest.TestCase): + + self.rdm.pause() + self.rdm.addMessage(self.theMessage) +- self.assertEquals(self.transport, []) ++ self.assertEqual(self.transport, []) + + self.rdm.unpause() +- self.assertEquals(self.transport, [[(0, self.theMessage)]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage)]]) + + + def testMessagelessPause(self): +@@ -1153,10 +1154,10 @@ class Transport(unittest.TestCase): + + self.rdm.pause() + self.rdm.addMessage(self.theMessage) +- self.assertEquals(self.transport, []) ++ self.assertEqual(self.transport, []) + + self.rdm.unpause() +- self.assertEquals(self.transport, [[(0, self.theMessage)]]) ++ self.assertEqual(self.transport, [[(0, self.theMessage)]]) + + + def testStaleMessages(self): +@@ 
-1170,7 +1171,7 @@ class Transport(unittest.TestCase): + [-1, [[0, self.theMessage], + [1, self.theMessage + "-1"], + [2, self.theMessage + "-2"]]]) +- self.assertEquals( ++ self.assertEqual( + self.outgoingMessages, + [(None, self.theMessage), + (None, self.theMessage + "-1"), +@@ -1180,14 +1181,14 @@ class Transport(unittest.TestCase): + self.rdm.basketCaseReceived( + None, + [-1, [[1, self.theMessage + "-1"]]]) +- self.assertEquals( ++ self.assertEqual( + self.outgoingMessages, + []) + + self.rdm.basketCaseReceived( + None, + [-1, [[2, self.theMessage + "-2"]]]) +- self.assertEquals( ++ self.assertEqual( + self.outgoingMessages, + []) + +@@ -1202,11 +1203,11 @@ class Transport(unittest.TestCase): + self.rdm.addOutput(mappend(self.transport)) + self.rdm.addOutput(mappend(self.transport)) + self.rdm.close() +- self.assertEquals(self.transport, [[(0, (athena.CLOSE, []))], [(0, (athena.CLOSE, []))]]) ++ self.assertEqual(self.transport, [[(0, (athena.CLOSE, []))], [(0, (athena.CLOSE, []))]]) + + self.transport = [] + self.rdm.addOutput(mappend(self.transport)) +- self.assertEquals(self.transport, [[(0, (athena.CLOSE, []))]]) ++ self.assertEqual(self.transport, [[(0, (athena.CLOSE, []))]]) + + + def testCloseBeforeConnect(self): +@@ -1215,7 +1216,7 @@ class Transport(unittest.TestCase): + ever established properly cleans up any timeouts. 
+ """ + self.rdm.close() +- self.failIf(self.scheduled, "Expected no scheduled calls.") ++ self.assertFalse(self.scheduled, "Expected no scheduled calls.") + + + def test_closeExcessOnReceived(self): +@@ -1226,9 +1227,9 @@ class Transport(unittest.TestCase): + self.rdm.addOutput(mappend(self.transport)) + self.rdm.addOutput(mappend(secondTransport)) + d = self.rdm.basketCaseReceived(None, [0, []]) +- self.assertEquals(self.transport, [[]]) +- self.assertEquals(secondTransport, [[]]) +- self.failIf(d.called) ++ self.assertEqual(self.transport, [[]]) ++ self.assertEqual(secondTransport, [[]]) ++ self.assertFalse(d.called) + + + def test_closeExcessOnUnpaused(self): +@@ -1461,7 +1462,7 @@ class LiveMixinTestsMixin(CSSModuleTestMixin): + Our element's glue should include inline stylesheet references. + """ + element = self.elementFactory() +- element.cssModule = u'TestCSSModuleDependencies.Dependor' ++ element.cssModule = 'TestCSSModuleDependencies.Dependor' + element.docFactory = loaders.stan( + tags.div(render=tags.directive(self.liveGlueRenderer))) + +@@ -1472,8 +1473,8 @@ class LiveMixinTestsMixin(CSSModuleTestMixin): + def cbRendered(result): + expected = flat.flatten( + page.getStylesheetStan( +- [page.getCSSModuleURL(u'TestCSSModuleDependencies.Dependee'), +- page.getCSSModuleURL(u'TestCSSModuleDependencies.Dependor')])) ++ [page.getCSSModuleURL('TestCSSModuleDependencies.Dependee'), ++ page.getCSSModuleURL('TestCSSModuleDependencies.Dependor')])) + self.assertIn(expected, result) + D.addCallback(cbRendered) + return D +@@ -1484,7 +1485,7 @@ class LiveMixinTestsMixin(CSSModuleTestMixin): + Our element's glue shouldn't include redundant stylesheet references. 
+ """ + element = self.elementFactory() +- element.cssModule = u'TestCSSModuleDependencies.Dependor' ++ element.cssModule = 'TestCSSModuleDependencies.Dependor' + element.docFactory = loaders.stan( + tags.div(render=tags.directive(self.liveGlueRenderer))) + +@@ -1499,8 +1500,8 @@ class LiveMixinTestsMixin(CSSModuleTestMixin): + def cbRendered(result): + expected = flat.flatten( + page.getStylesheetStan( +- [page.getCSSModuleURL(u'TestCSSModuleDependencies.Dependee'), +- page.getCSSModuleURL(u'TestCSSModuleDependencies.Dependor')])) ++ [page.getCSSModuleURL('TestCSSModuleDependencies.Dependee'), ++ page.getCSSModuleURL('TestCSSModuleDependencies.Dependor')])) + self.assertIn(expected, result) + D.addCallback(cbRendered) + return D +@@ -1569,7 +1570,7 @@ class LivePageTests(unittest.TestCase, CSSModuleTestMi + calling a single JavaScript function. + """ + bc = self.page._bootstrapCall( +- "SomeModule.someMethod", [u"one", 2, {u"three": 4.1}]) ++ "SomeModule.someMethod", ["one", 2, {"three": 4.1}]) + self.assertEqual( + bc, 'SomeModule.someMethod("one", 2, {"three":4.1});') + +@@ -1579,14 +1580,14 @@ class LivePageTests(unittest.TestCase, CSSModuleTestMi + L{LivePage.render_liveglue} should include modules that the + L{LivePage}'s jsClass depends on. 
+ """ +- self.page.jsClass = u'PythonTestSupport.Dependor.PageTest' ++ self.page.jsClass = 'PythonTestSupport.Dependor.PageTest' + freq = FakeRequest() + self.page._becomeLive(url.URL.fromRequest(freq)) + ctx = WovenContext(tag=tags.div()) + ctx.remember(freq, IRequest) + self.assertEqual(self.page.render_liveglue(ctx, None), ctx.tag) +- expectDependor = flat.flatten(self.page.getImportStan(u'PythonTestSupport.Dependor')) +- expectDependee = flat.flatten(self.page.getImportStan(u'PythonTestSupport.Dependee')) ++ expectDependor = flat.flatten(self.page.getImportStan('PythonTestSupport.Dependor')) ++ expectDependee = flat.flatten(self.page.getImportStan('PythonTestSupport.Dependee')) + result = flat.flatten(ctx.tag, ctx) + self.assertIn(expectDependor, result) + self.assertIn(expectDependee, result) +@@ -1597,7 +1598,7 @@ class LivePageTests(unittest.TestCase, CSSModuleTestMi + L{athena.LivePage.render_liveglue} should include CSS modules that + the top-level C{cssModule} depends on. + """ +- self.page.cssModule = u'TestCSSModuleDependencies.Dependor' ++ self.page.cssModule = 'TestCSSModuleDependencies.Dependor' + self.page.cssModules = self._makeCSSRegistry() + + self.page._becomeLive(url.URL()) +@@ -1606,8 +1607,8 @@ class LivePageTests(unittest.TestCase, CSSModuleTestMi + self.assertEqual(self.page.render_liveglue(ctx, None), ctx.tag) + expected = flat.flatten( + self.page.getStylesheetStan( +- [self.page.getCSSModuleURL(u'TestCSSModuleDependencies.Dependee'), +- self.page.getCSSModuleURL(u'TestCSSModuleDependencies.Dependor')])) ++ [self.page.getCSSModuleURL('TestCSSModuleDependencies.Dependee'), ++ self.page.getCSSModuleURL('TestCSSModuleDependencies.Dependor')])) + self.assertIn(expected, flat.flatten(ctx.tag, ctx)) + + +@@ -1631,9 +1632,9 @@ class LivePageTests(unittest.TestCase, CSSModuleTestMi + # Nevow's URL quoting rules are weird, but this is the URL + # flattener's fault, not mine. 
Adjust to taste if that changes + # (it won't) -glyph +- [u"http://localhost/'%22"]), ++ ["http://localhost/'%22"]), + ("Nevow.Athena.bootstrap", +- [u'Nevow.Athena.PageWidget', u'asdf'])]) ++ ['Nevow.Athena.PageWidget', 'asdf'])]) + + + def test_renderReconnect(self): +@@ -1683,7 +1684,7 @@ class LivePageTests(unittest.TestCase, CSSModuleTestMi + page = athena.LivePage( + cssModuleRoot=theCSSModuleRoot) + self.assertEqual( +- page.getCSSModuleURL(u'X.Y'), ++ page.getCSSModuleURL('X.Y'), + theCSSModuleRoot.child('X.Y')) + + +@@ -1857,7 +1858,7 @@ class WidgetSubcommandTests(unittest.TestCase): + """ + options = widgetServiceMaker.options() + options.parseOptions(['--element', qual(DummyLiveElement)]) +- self.assertEquals(options['element'], DummyLiveElement) ++ self.assertEqual(options['element'], DummyLiveElement) + + + def test_invalidWidgetOption(self): +@@ -1897,8 +1898,8 @@ class WidgetSubcommandTests(unittest.TestCase): + Verify that the necessary interfaces for the object to be found as a + twistd subcommand plugin are provided. 
+ """ +- self.failUnless(IPlugin.providedBy(widgetServiceMaker)) +- self.failUnless(IServiceMaker.providedBy(widgetServiceMaker)) ++ self.assertTrue(IPlugin.providedBy(widgetServiceMaker)) ++ self.assertTrue(IServiceMaker.providedBy(widgetServiceMaker)) + + + def test_makeService(self): +@@ -1910,11 +1911,11 @@ class WidgetSubcommandTests(unittest.TestCase): + 'element': DummyLiveElement, + 'port': 8080, + }) +- self.failUnless(isinstance(service, TCPServer)) ++ self.assertTrue(isinstance(service, TCPServer)) + self.assertEqual(service.args[0], 8080) +- self.failUnless(isinstance(service.args[1], NevowSite)) +- self.failUnless(isinstance(service.args[1].resource, WidgetPluginRoot)) +- self.failUnless(isinstance(service.args[1].resource.elementFactory(), ++ self.assertTrue(isinstance(service.args[1], NevowSite)) ++ self.assertTrue(isinstance(service.args[1].resource, WidgetPluginRoot)) ++ self.assertTrue(isinstance(service.args[1].resource.elementFactory(), + DummyLiveElement)) + + +@@ -1924,7 +1925,7 @@ class WidgetSubcommandTests(unittest.TestCase): + particular LiveElement properly renders that element. + """ + element = DummyLiveElement() +- element.jsClass = u'Dummy.ClassName' ++ element.jsClass = 'Dummy.ClassName' + element.docFactory = stan('the element') + page = ElementRenderingLivePage(element) + renderDeferred = renderLivePage(page) +@@ -1951,8 +1952,8 @@ class WidgetSubcommandTests(unittest.TestCase): + page2, seg = w.locateChild(None, ['']) + + # Make sure the pages aren't the same. +- self.failUnless(isinstance(page1, ElementRenderingLivePage)) +- self.failUnless(isinstance(page2, ElementRenderingLivePage)) ++ self.assertTrue(isinstance(page1, ElementRenderingLivePage)) ++ self.assertTrue(isinstance(page2, ElementRenderingLivePage)) + self.assertNotIdentical(page1, page2) + + # Make sure the elements aren't the same. 
+@@ -1974,7 +1975,7 @@ class WidgetSubcommandTests(unittest.TestCase): + w = WidgetPluginRoot(DummyLiveElement) + page1, seg = w.locateChild(None, ['']) + page1.element.docFactory = stan('the element') +- page1.element.jsClass = u'Dummy.ClassName' ++ page1.element.jsClass = 'Dummy.ClassName' + def cbCheckPageByClientID(result): + req = FakeRequest() + ctx = WovenContext() +--- nevow/test/test_errorhandler.py.orig 2015-10-20 22:44:10 UTC ++++ nevow/test/test_errorhandler.py +@@ -61,25 +61,28 @@ class Test404(testutil.TestCase): + """ + root = Root() + def later(resource): +- self.failUnless(isinstance(resource, rend.FourOhFour)) +- def morelater((code, html)): +- self.assertEquals(rend.FourOhFour.notFound, html) +- self.assertEquals(code, 404) ++ self.assertTrue(isinstance(resource, rend.FourOhFour)) ++ def morelater(xxx_todo_changeme): ++ (code, html) = xxx_todo_changeme ++ self.assertEqual(rend.FourOhFour.notFound, html) ++ self.assertEqual(code, 404) + return renderResource('/foo').addCallback(morelater) + return getResource(root, '/foo').addCallback(later) + + def test_remembered404Handler(self): +- def later((code, html)): +- self.assertEquals(html, NotFoundHandler.html) +- self.assertEquals(code, 404) ++ def later(xxx_todo_changeme1): ++ (code, html) = xxx_todo_changeme1 ++ self.assertEqual(html, NotFoundHandler.html) ++ self.assertEqual(code, 404) + + return renderResource('/foo', notFoundHandler=NotFoundHandler()).addCallback(later) + + def test_keyErroringNotFoundHandler(self): +- def later((code, html)): +- self.assertEquals(rend.FourOhFour.notFound, html) +- self.assertEquals(code, 404) ++ def later(xxx_todo_changeme2): ++ (code, html) = xxx_todo_changeme2 ++ self.assertEqual(rend.FourOhFour.notFound, html) ++ self.assertEqual(code, 404) + fe = self.flushLoggedErrors(BrokenException) +- self.assertEquals(len(fe), 1) ++ self.assertEqual(len(fe), 1) + return renderResource('/foo', notFoundHandler=BadNotFoundHandler()).addCallback(later) + +--- 
nevow/test/test_flatstan.py.orig 2015-10-20 22:44:10 UTC ++++ nevow/test/test_flatstan.py +@@ -49,21 +49,21 @@ class Base(TestCase): + + class TestSimpleSerialization(Base): + def test_serializeProto(self): +- self.assertEquals(self.render(proto), '') ++ self.assertEqual(self.render(proto), '') + + def test_serializeTag(self): + tag = proto(someAttribute="someValue") +- self.assertEquals(self.render(tag), '') ++ self.assertEqual(self.render(tag), '') + + def test_serializeChildren(self): + tag = proto(someAttribute="someValue")[ + proto + ] +- self.assertEquals(self.render(tag), '') ++ self.assertEqual(self.render(tag), '') + + def test_serializeWithData(self): + tag = proto(data=5) +- self.assertEquals(self.render(tag), '') ++ self.assertEqual(self.render(tag), '') + + def test_adaptRenderer(self): + ## This is an implementation of the "adapt" renderer +@@ -72,19 +72,19 @@ class TestSimpleSerialization(Base): + data + ] + tag = proto(data=5, render=_) +- self.assertEquals(self.render(tag), '5') ++ self.assertEqual(self.render(tag), '5') + + def test_serializeDataWithRenderer(self): + tag = proto(data=5, render=str) +- self.assertEquals(self.render(tag), '5') ++ self.assertEqual(self.render(tag), '5') + + def test_noContextRenderer(self): + def _(data): + return data + tag = proto(data=5, render=_) +- self.assertEquals(self.render(tag), '5') ++ self.assertEqual(self.render(tag), '5') + tag = proto(data=5, render=lambda data: data) +- self.assertEquals(self.render(tag), '5') ++ self.assertEqual(self.render(tag), '5') + + def test_aBunchOfChildren(self): + tag = proto[ +@@ -92,28 +92,28 @@ class TestSimpleSerialization(Base): + 5, + "A friend in need is a friend indeed" + ] +- self.assertEquals(self.render(tag), 'A Child5A friend in need is a friend indeed') ++ self.assertEqual(self.render(tag), 'A Child5A friend in need is a friend indeed') + + def test_basicPythonTypes(self): + tag = proto(data=5)[ + "A string; ", +- u"A unicode string; ", ++ "A unicode string; ", 
+ 5, " (An integer) ", + 1.0, " (A float) ", +- 1L, " (A long) ", ++ 1, " (A long) ", + True, " (A bool) ", + ["A ", "List; "], + stan.xml(" Some xml; "), + lambda data: "A function" + ] + if self.hasBools: +- self.assertEquals(self.render(tag), "A string; A unicode string; 5 (An integer) 1.0 (A float) 1 (A long) True (A bool) A List; Some xml; A function") ++ self.assertEqual(self.render(tag), "A string; A unicode string; 5 (An integer) 1.0 (A float) 1 (A long) True (A bool) A List; Some xml; A function") + else: +- self.assertEquals(self.render(tag), "A string; A unicode string; 5 (An integer) 1.0 (A float) 1 (A long) 1 (A bool) A List; Some xml; A function") ++ self.assertEqual(self.render(tag), "A string; A unicode string; 5 (An integer) 1.0 (A float) 1 (A long) 1 (A bool) A List; Some xml; A function") + + def test_escaping(self): + tag = proto(foo="<>&\"'")["<>&\"'"] +- self.assertEquals(self.render(tag), '<>&"\'') ++ self.assertEqual(self.render(tag), '<>&"\'') + + + class TestComplexSerialization(Base): +@@ -127,11 +127,11 @@ class TestComplexSerialization(Base): + ] + ] + prelude, context, postlude = self.render(tag, precompile=True) +- self.assertEquals(prelude, "

Here's a string

") +- self.assertEquals(context.tag.tagName, "p") +- self.assertEquals(context.tag.data, 5) +- self.assertEquals(context.tag.render, str) +- self.assertEquals(postlude, '
') ++ self.assertEqual(prelude, "

Here's a string

") ++ self.assertEqual(context.tag.tagName, "p") ++ self.assertEqual(context.tag.data, 5) ++ self.assertEqual(context.tag.render, str) ++ self.assertEqual(postlude, '
') + + def test_precompileSlotData(self): + """Test that tags with slotData are not precompiled out of the +@@ -140,7 +140,7 @@ class TestComplexSerialization(Base): + tag = tags.p[tags.slot('foo')] + tag.fillSlots('foo', 'bar') + precompiled = self.render(tag, precompile=True) +- self.assertEquals(self.render(precompiled), '

bar

') ++ self.assertEqual(self.render(precompiled), '

bar

') + + + def test_precompiledSlotLocation(self): +@@ -186,7 +186,7 @@ class TestComplexSerialization(Base): + result1 = self.render(doc, precompile=True) + result2 = self.render(doc, precompile=True) + rendered = self.render(result2) +- self.assertEquals(rendered, "

Hello

5

") ++ self.assertEqual(rendered, "

Hello

5

") + + def test_precompilePrecompiled(self): + def render_same(context, data): +@@ -203,7 +203,7 @@ class TestComplexSerialization(Base): + result1 = self.render(doc, precompile=True) + result2 = self.render(result1, precompile=True) + rendered = self.render(result2) +- self.assertEquals(rendered, "

Hello

5

") ++ self.assertEqual(rendered, "

Hello

5

") + + def test_precompileDoesntChangeOriginal(self): + doc = tags.html(data="foo")[tags.p['foo'], tags.p['foo']] +@@ -211,37 +211,37 @@ class TestComplexSerialization(Base): + result = self.render(doc, precompile=True) + rendered = self.render(result) + +- self.assertEquals(len(doc.children), 2) +- self.assertEquals(rendered, "

foo

foo

") ++ self.assertEqual(len(doc.children), 2) ++ self.assertEqual(rendered, "

foo

foo

") + + def test_precompileNestedDynamics(self): + tag = self.makeComplex() + prelude, dynamic, postlude = self.render(tag, precompile=True) +- self.assertEquals(prelude, '') ++ self.assertEqual(prelude, '') + +- self.assertEquals(dynamic.tag.tagName, 'table') +- self.failUnless(dynamic.tag.children) +- self.assertEquals(dynamic.tag.data, 5) ++ self.assertEqual(dynamic.tag.tagName, 'table') ++ self.assertTrue(dynamic.tag.children) ++ self.assertEqual(dynamic.tag.data, 5) + + childPrelude, childDynamic, childPostlude = dynamic.tag.children + +- self.assertEquals(childPrelude, '
') ++ self.assertEqual(childPrelude, '') + +- self.assertEquals(postlude, '') ++ self.assertEqual(postlude, '') + + def test_precompileThenRender(self): + tag = self.makeComplex() + prerendered = self.render(tag, precompile=True) +- self.assertEquals(self.render(prerendered), '
%s
') +- self.assertEquals(childDynamic.tag.tagName, 'span') +- self.assertEquals(childDynamic.tag.render, str) +- self.assertEquals(childPostlude, '
') ++ self.assertEqual(childDynamic.tag.tagName, 'span') ++ self.assertEqual(childDynamic.tag.render, str) ++ self.assertEqual(childPostlude, '
5
') ++ self.assertEqual(self.render(prerendered), '
5
') + + def test_precompileThenMultipleRenders(self): + tag = self.makeComplex() + prerendered = self.render(tag, precompile=True) +- self.assertEquals(self.render(prerendered), '
5
') +- self.assertEquals(self.render(prerendered), '
5
') ++ self.assertEqual(self.render(prerendered), '
5
') ++ self.assertEqual(self.render(prerendered), '
5
') + + def test_patterns(self): + tag = tags.html[ +@@ -253,12 +253,12 @@ class TestComplexSerialization(Base): + ] + ] + ] +- self.assertEquals(self.render(tag), "
  1. one
  2. two
  3. three
") ++ self.assertEqual(self.render(tag), "
  1. one
  2. two
  3. three
") + + def test_precompilePatternWithNoChildren(self): + tag = tags.img(pattern='item') + pc = flat.precompile(tag) +- self.assertEquals(pc[0].tag.children, []) ++ self.assertEqual(pc[0].tag.children, []) + + def test_slots(self): + tag = tags.html[ +@@ -272,7 +272,7 @@ class TestComplexSerialization(Base): + ] + ] + ] +- self.assertEquals(self.render(tag), "
Header one.Header two.
One: 1Two: 2
") ++ self.assertEqual(self.render(tag), "
Header one.Header two.
One: 1Two: 2
") + + + def test_slotAttributeEscapingWhenPrecompiled(self): +@@ -291,7 +291,7 @@ class TestComplexSerialization(Base): + # this test passes if the precompile test is skipped. + precompiled = self.render(tag, precompile=True) + +- self.assertEquals(self.render(precompiled), '') ++ self.assertEqual(self.render(precompiled), '') + + + def test_nestedpatterns(self): +@@ -309,7 +309,7 @@ class TestComplexSerialization(Base): + ] + ] + ] +- self.assertEquals(self.render(tag), "
col1col2col3
123
456
") ++ self.assertEqual(self.render(tag), "
col1col2col3
123
456
") + + def test_cloning(self): + def data_foo(context, data): return [{'foo':'one'}, {'foo':'two'}] +@@ -334,24 +334,24 @@ class TestComplexSerialization(Base): + ] + ] + document=self.render(document, precompile=True) +- self.assertEquals(self.render(document), '
  • fooone
  • footwo
') ++ self.assertEqual(self.render(document), '
  • fooone
  • footwo
') + + def test_singletons(self): + for x in ('img', 'br', 'hr', 'base', 'meta', 'link', 'param', 'area', + 'input', 'col', 'basefont', 'isindex', 'frame'): +- self.assertEquals(self.render(tags.Proto(x)()), '<%s />' % x) ++ self.assertEqual(self.render(tags.Proto(x)()), '<%s />' % x) + + def test_nosingleton(self): + for x in ('div', 'span', 'script', 'iframe'): +- self.assertEquals(self.render(tags.Proto(x)()), '<%(tag)s>' % {'tag': x}) ++ self.assertEqual(self.render(tags.Proto(x)()), '<%(tag)s>' % {'tag': x}) + + def test_nested_data(self): + def checkContext(ctx, data): +- self.assertEquals(data, "inner") +- self.assertEquals(ctx.locate(inevow.IData, depth=2), "outer") ++ self.assertEqual(data, "inner") ++ self.assertEqual(ctx.locate(inevow.IData, depth=2), "outer") + return 'Hi' + tag = tags.html(data="outer")[tags.span(render=lambda ctx,data: ctx.tag, data="inner")[checkContext]] +- self.assertEquals(self.render(tag), "Hi") ++ self.assertEqual(self.render(tag), "Hi") + + def test_nested_remember(self): + class IFoo(Interface): +@@ -360,11 +360,11 @@ class TestComplexSerialization(Base): + implements(IFoo) + + def checkContext(ctx, data): +- self.assertEquals(ctx.locate(IFoo), Foo("inner")) +- self.assertEquals(ctx.locate(IFoo, depth=2), Foo("outer")) ++ self.assertEqual(ctx.locate(IFoo), Foo("inner")) ++ self.assertEqual(ctx.locate(IFoo, depth=2), Foo("outer")) + return 'Hi' + tag = tags.html(remember=Foo("outer"))[tags.span(render=lambda ctx,data: ctx.tag, remember=Foo("inner"))[checkContext]] +- self.assertEquals(self.render(tag), "Hi") ++ self.assertEqual(self.render(tag), "Hi") + + def test_deferredRememberInRenderer(self): + class IFoo(Interface): +@@ -376,7 +376,7 @@ class TestComplexSerialization(Base): + return IFoo(ctx) + tag = tags.invisible(render=rememberIt)[tags.invisible(render=locateIt)] + self.render(tag, wantDeferred=True).addCallback( +- lambda result: self.assertEquals(result, "bar")) ++ lambda result: self.assertEqual(result, "bar")) + + 
def test_deferredFromNestedFunc(self): + def outer(ctx, data): +@@ -384,14 +384,14 @@ class TestComplexSerialization(Base): + return defer.succeed(tags.p['Hello']) + return inner + self.render(tags.invisible(render=outer), wantDeferred=True).addCallback( +- lambda result: self.assertEquals(result, '

Hello

')) ++ lambda result: self.assertEqual(result, '

Hello

')) + + def test_dataContextCreation(self): + data = {'foo':'oof', 'bar':'rab'} + doc = tags.p(data=data)[tags.slot('foo'), tags.slot('bar')] + doc.fillSlots('foo', tags.invisible(data=tags.directive('foo'), render=str)) + doc.fillSlots('bar', lambda ctx,data: data['bar']) +- self.assertEquals(flat.flatten(doc), '

oofrab

') ++ self.assertEqual(flat.flatten(doc), '

oofrab

') + + def test_leaky(self): + def foo(ctx, data): +@@ -403,7 +403,7 @@ class TestComplexSerialization(Base): + tags.slot("bar"), + tags.invisible(render=str)]) + +- self.assertEquals(result, '
one
') ++ self.assertEqual(result, '
one
') + + + class TestMultipleRenderWithDirective(Base): +@@ -433,37 +433,37 @@ class TestMultipleRenderWithDirective(Base): + class TestEntity(Base): + def test_it(self): + val = self.render(entities.nbsp) +- self.assertEquals(val, ' ') ++ self.assertEqual(val, ' ') + + def test_nested(self): + val = self.render(tags.html(src=entities.quot)[entities.amp]) +- self.assertEquals(val, '&') ++ self.assertEqual(val, '&') + + def test_xml(self): + val = self.render([entities.lt, entities.amp, entities.gt]) +- self.assertEquals(val, '<&>') ++ self.assertEqual(val, '<&>') + + + class TestNoneAttribute(Base): + + def test_simple(self): + val = self.render(tags.html(foo=None)["Bar"]) +- self.assertEquals(val, "Bar") ++ self.assertEqual(val, "Bar") + + def test_slot(self): + val = self.render(tags.html().fillSlots('bar', None)(foo=tags.slot('bar'))["Bar"]) +- self.assertEquals(val, "Bar") ++ self.assertEqual(val, "Bar") + test_slot.skip = "Attribute name flattening must happen later for this to work" + + def test_deepSlot(self): + val = self.render(tags.html().fillSlots('bar', lambda c,d: None)(foo=tags.slot('bar'))["Bar"]) +- self.assertEquals(val, "Bar") ++ self.assertEqual(val, "Bar") + test_deepSlot.skip = "Attribute name flattening must happen later for this to work" + + def test_deferredSlot(self): + self.render(tags.html().fillSlots('bar', defer.succeed(None))(foo=tags.slot('bar'))["Bar"], + wantDeferred=True).addCallback( +- lambda val: self.assertEquals(val, "Bar")) ++ lambda val: self.assertEqual(val, "Bar")) + test_deferredSlot.skip = "Attribute name flattening must happen later for this to work" + + +@@ -478,7 +478,7 @@ class TestKey(Base): + tags.div(key="two", render=appendKey)[ + tags.div(render=appendKey)[ + tags.div(key="four", render=appendKey)]]]) +- self.assertEquals(val, ["one", "one.two", "one.two", "one.two.four"]) ++ self.assertEqual(val, ["one", "one.two", "one.two", "one.two.four"]) + + + +@@ -507,7 +507,7 @@ class TestDeferFlatten(Base): + # The actual 
test + notquiteglobals = {} + def finished(spam): +- print 'FINISHED' ++ print('FINISHED') + def error(failure): + notquiteglobals['exception'] = failure.value + def checker(result): +--- nevow/test/test_guard.py.orig 2016-04-15 14:17:07 UTC ++++ nevow/test/test_guard.py +@@ -84,7 +84,7 @@ class FakeHTTPRequest(appserver.NevowRequest): + appserver.NevowRequest.__init__(self, *args, **kw) + self._pchn = self.channel + self._cookieCache = {} +- from cStringIO import StringIO ++ from io import StringIO + self.content = StringIO() + self.requestHeaders.setRawHeaders(b'host', [b'fake.com']) + self.written = StringIO() +@@ -117,7 +117,7 @@ class FakeHTTPRequest(appserver.NevowRequest): + + def addCookie(self, k, v, *args,**kw): + appserver.NevowRequest.addCookie(self,k,v,*args,**kw) +- assert not self._cookieCache.has_key(k), "Should not be setting duplicate cookies!" ++ assert k not in self._cookieCache, "Should not be setting duplicate cookies!" + self._cookieCache[k] = (v, args, kw) + self.channel.received_cookies[k] = v + +@@ -211,7 +211,7 @@ class GuardTestSuper(TestCase): + sessions = {} + + def tearDown(self): +- for sz in self.sessions.values(): ++ for sz in list(self.sessions.values()): + sz.expire() + + def createPortal(self, realmFactory=None): +@@ -237,7 +237,7 @@ def getGuard(channel): + resource = channel.site.resource + while isinstance(resource, ParentPage): + assert len(resource.children) == 1 +- resource = resource.children.values()[0] ++ resource = list(resource.children.values())[0] + return resource + + +@@ -254,7 +254,7 @@ class GetLoggedInAvatar(rend.Page): + class GetLoggedInAnonymous(rend.Page): + def child_(self, ctx): return self + def renderHTTP(self, ctx): +- raise RuntimeError, "We weren't supposed to get here." ++ raise RuntimeError("We weren't supposed to get here.") + + class GetLoggedInRealm: + implements(IRealm) +@@ -307,8 +307,8 @@ class GuardTestFuncs: + # each time, and there should only ever be one session. 
+ for x in range(3): + req = chan.makeFakeRequest('%s/' % self.getGuardPath(), "test", "test") +- self.assertEquals(req.written.getvalue(), "Yes") +- self.assertEquals(len(self.sessions), 1) ++ self.assertEqual(req.written.getvalue(), "Yes") ++ self.assertEqual(len(self.sessions), 1) + + + def test_sessionInit(self): +@@ -326,28 +326,28 @@ class GuardTestFuncs: + # The first thing that happens when we attempt to browse with no session + # is a cookie being set and a redirect being issued to the session url + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath()) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") + # The redirect is set immediately and should have a path segment at the beginning matching our cookie +- self.failUnless(req.responseHeaders.hasHeader('location')) +- cookie = req._cookieCache.values()[0][0] ++ self.assertTrue(req.responseHeaders.hasHeader('location')) ++ cookie = list(req._cookieCache.values())[0][0] + + # The URL should have the cookie segment in it and the correct path segments at the end +- self.assertEquals(req.responseHeaders.getRawHeaders('location')[0], ++ self.assertEqual(req.responseHeaders.getRawHeaders('location')[0], + 'http://fake.com%s/%s/xxx/yyy/' % (self.getGuardPath(), guard.SESSION_KEY+cookie, )) + + # Now, let's follow the redirect + req = req.followRedirect() + # Our session should now be set up and we will be redirected to our final destination +- self.assertEquals( ++ self.assertEqual( + req.responseHeaders.getRawHeaders('location')[0].split('?')[0], + 'http://fake.com%s/xxx/yyy/' % self.getGuardPath()) + + # Let's follow the redirect to the final page + req = req.followRedirect() +- self.failIf(req.responseHeaders.hasHeader('location')) ++ self.assertFalse(req.responseHeaders.hasHeader('location')) + + # We should have the final resource, which is an anonymous 
resource +- self.assertEquals(req.written.getvalue(), "No") ++ self.assertEqual(req.written.getvalue(), "No") + + + def test_sessionInit_noCookies(self): +@@ -367,23 +367,23 @@ class GuardTestFuncs: + # is a cookie being set and a redirect being issued to the session url + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath(), requestClass=FakeHTTPRequest_noCookies) + # The redirect is set immediately and should have a path segment at the beginning matching our session id +- self.failUnless(req.responseHeaders.hasHeader('location')) ++ self.assertTrue(req.responseHeaders.hasHeader('location')) + + # The URL should have the session id segment in it and the correct path segments at the end + [location] = req.responseHeaders.getRawHeaders('location') + prefix = 'http://fake.com%s/%s' % (self.getGuardPath(), guard.SESSION_KEY) + suffix = '/xxx/yyy/' +- self.failUnless(location.startswith(prefix)) +- self.failUnless(location.endswith(suffix)) ++ self.assertTrue(location.startswith(prefix)) ++ self.assertTrue(location.endswith(suffix)) + for c in location[len(prefix):-len(suffix)]: +- self.failUnless(c in '0123456789abcdef') ++ self.assertTrue(c in '0123456789abcdef') + + # Now, let's follow the redirect + req = req.followRedirect() +- self.failIf(req.responseHeaders.hasHeader('location')) ++ self.assertFalse(req.responseHeaders.hasHeader('location')) + + # We should have the final resource, which is an anonymous resource +- self.assertEquals(req.written.getvalue(), "No") ++ self.assertEqual(req.written.getvalue(), "No") + + + def testUsernamePassword(self): +@@ -393,18 +393,18 @@ class GuardTestFuncs: + + # Check the anonymous page + req = chan.makeFakeRequest('%s/' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), "No") ++ self.assertEqual(req.written.getvalue(), "No") + + # Check the logged in page + req = chan.makeFakeRequest('%s/__login__/?username=test&password=test' % self.getGuardPath()).followAllRedirects() +- 
self.assertEquals(req.written.getvalue(), "Yes") ++ self.assertEqual(req.written.getvalue(), "Yes") + + # Log out + chan.makeFakeRequest("%s/__logout__" % self.getGuardPath()).followRedirect() + + # Get the anonymous page again + k = chan.makeFakeRequest("%s/" % self.getGuardPath()) +- self.assertEquals(k.written.getvalue(), "No") ++ self.assertEqual(k.written.getvalue(), "No") + + + def testLoginWithNoSession(self): +@@ -413,7 +413,7 @@ class GuardTestFuncs: + chan = self.createGuard(p) + + req = chan.makeFakeRequest('%s/__login__/?username=test&password=test' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), "Yes") ++ self.assertEqual(req.written.getvalue(), "Yes") + + + def test_sessionNegotiationSavesRequestParameters(self): +@@ -431,7 +431,7 @@ class GuardTestFuncs: + + request = channel.makeFakeRequest( + '%s/?foo=1&bar=2' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(request.written.getvalue(), '') ++ self.assertEqual(request.written.getvalue(), '') + self.assertEqual( + renders, [({'foo': ['1'], 'bar': ['2']}, + None, +@@ -473,7 +473,7 @@ class GuardTestFuncs: + self.getGuardPath() + '/__login__?username=test&password=test') + request = request.followAllRedirects() + +- self.assertEquals(request.written.getvalue(), '') ++ self.assertEqual(request.written.getvalue(), '') + self.assertEqual( + renders, [({'foo': ['1'], 'bar': ['2']}, + None, +@@ -506,7 +506,7 @@ class GuardTestFuncs: + self.getGuardPath() + '/__login__?username=test&password=test') + request = request.followAllRedirects() + +- self.assertEquals(request.written.getvalue(), '') ++ self.assertEqual(request.written.getvalue(), '') + self.assertEqual( + renders, [({'username': ['test'], 'password': ['test']}, + None, +@@ -522,16 +522,16 @@ class GuardTestFuncs: + + req = chan.makeFakeRequest('%s/' % self.getGuardPath(), requestClass=FakeHTTPRequest_noCookies).followAllRedirects() + # We should have the final resource, which is an anonymous 
resource +- self.assertEquals(req.written.getvalue(), "No") ++ self.assertEqual(req.written.getvalue(), "No") + + # now try requesting just the guard path +- self.failUnless(req.path.startswith('%s/%s' % (self.getGuardPath(), guard.SESSION_KEY))) +- self.failUnless(req.path.endswith('/')) ++ self.assertTrue(req.path.startswith('%s/%s' % (self.getGuardPath(), guard.SESSION_KEY))) ++ self.assertTrue(req.path.endswith('/')) + req = chan.makeFakeRequest(req.path[:-1], requestClass=FakeHTTPRequest_noCookies).followAllRedirects() + + # it should work just as well as with the slash + # (not actually the same page, but SillyPage always says the same thing here) +- self.assertEquals(req.written.getvalue(), "No") ++ self.assertEqual(req.written.getvalue(), "No") + + def testTrailingSlashMatters_noCookies(self): + class TrailingSlashPage(rend.Page): +@@ -563,15 +563,15 @@ class GuardTestFuncs: + + req = chan.makeFakeRequest('%s/' % self.getGuardPath(), requestClass=FakeHTTPRequest_noCookies).followAllRedirects() + # We should have the final resource, which is an anonymous resource +- self.assertEquals(req.written.getvalue(), "Anonymous %s/" % self.getGuardPath()) ++ self.assertEqual(req.written.getvalue(), "Anonymous %s/" % self.getGuardPath()) + + # now try requesting just the guard path +- self.failUnless(req.path.startswith('%s/%s' % (self.getGuardPath(), guard.SESSION_KEY))) +- self.failUnless(req.path.endswith('/')) ++ self.assertTrue(req.path.startswith('%s/%s' % (self.getGuardPath(), guard.SESSION_KEY))) ++ self.assertTrue(req.path.endswith('/')) + req = chan.makeFakeRequest(req.path[:-1], requestClass=FakeHTTPRequest_noCookies).followAllRedirects() + + # it should no longer have the trailing slash +- self.assertEquals(req.written.getvalue(), "Anonymous %s" % self.getGuardPath()) ++ self.assertEqual(req.written.getvalue(), "Anonymous %s" % self.getGuardPath()) + + def testTrailingSlashMatters_withCookies(self): + # omitting the trailing slash when not using session 
keys can +@@ -608,21 +608,21 @@ class GuardTestFuncs: + + req = chan.makeFakeRequest('%s/' % self.getGuardPath()).followAllRedirects() + # We should have the final resource, which is an anonymous resource +- self.assertEquals(req.written.getvalue(), "Anonymous %s/" % self.getGuardPath()) ++ self.assertEqual(req.written.getvalue(), "Anonymous %s/" % self.getGuardPath()) + + req = chan.makeFakeRequest('%s' % self.getGuardPath()).followAllRedirects() + # We should have the final resource, which is an anonymous resource +- self.assertEquals(req.written.getvalue(), "Anonymous %s" % self.getGuardPath()) ++ self.assertEqual(req.written.getvalue(), "Anonymous %s" % self.getGuardPath()) + + def testPlainTextCookie(self): + """Cookies from non-SSL sites have no secure attribute.""" + p = self.createPortal() + chan = self.createGuard(p) + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath()) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") +- cookie, a, kw = req._cookieCache.values()[0] ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") ++ cookie, a, kw = list(req._cookieCache.values())[0] + secure = kw.get('secure', None) +- self.failIf(secure) ++ self.assertFalse(secure) + + def testPlainTextCookie_evenWithSecureCookies(self): + """Cookies from non-SSL sites have no secure attribute, even if secureCookie is true.""" +@@ -631,10 +631,10 @@ class GuardTestFuncs: + gu = getGuard(chan) + gu.secureCookies = False + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath()) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") +- cookie, a, kw = req._cookieCache.values()[0] ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") ++ cookie, a, kw = list(req._cookieCache.values())[0] + secure = kw.get('secure', None) +- self.failIf(secure) ++ self.assertFalse(secure) + + def 
testSecureCookie_secureCookies(self): + """Cookies from SSL sites have secure=True.""" +@@ -642,10 +642,10 @@ class GuardTestFuncs: + chan = self.createGuard(p) + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath(), + requestClass=FakeHTTPRequest_forceSSL) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") +- cookie, a, kw = req._cookieCache.values()[0] ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") ++ cookie, a, kw = list(req._cookieCache.values())[0] + secure = kw.get('secure', None) +- self.failUnless(secure) ++ self.assertTrue(secure) + + def testSecureCookie_noSecureCookies(self): + """Cookies from SSL sites do not have secure=True if secureCookies is false.""" +@@ -655,10 +655,10 @@ class GuardTestFuncs: + gu.secureCookies = False + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath(), + requestClass=FakeHTTPRequest_forceSSL) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") +- cookie, a, kw = req._cookieCache.values()[0] ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") ++ cookie, a, kw = list(req._cookieCache.values())[0] + secure = kw.get('secure', None) +- self.failIf(secure) ++ self.assertFalse(secure) + + def testPersistentCookie_persistentCookies(self): + """Cookies from sites are saved to disk because SessionWrapper.persistentCookies=True.""" +@@ -668,8 +668,8 @@ class GuardTestFuncs: + gu.persistentCookies = True + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath(), + requestClass=FakeHTTPRequest) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") +- cookie, a, kw = req._cookieCache.values()[0] ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") ++ cookie, a, kw = list(req._cookieCache.values())[0] + expires = kw.get('expires', None) + 
self.failIfIdentical(expires, None) + +@@ -679,8 +679,8 @@ class GuardTestFuncs: + chan = self.createGuard(p) + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath(), + requestClass=FakeHTTPRequest) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") +- cookie, a, kw = req._cookieCache.values()[0] ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") ++ cookie, a, kw = list(req._cookieCache.values())[0] + expires = kw.get('expires', None) + self.failUnlessIdentical(expires, None) + +@@ -691,13 +691,13 @@ class GuardTestFuncs: + p = self.createPortal() + chan = self.createGuard(p) + req = chan.makeFakeRequest('%s/xxx/yyy/' % self.getGuardPath()) +- self.assertEquals( len(req._cookieCache.values()), 1, "Bad number of cookies in response.") +- cookie, a, kw = req._cookieCache.values()[0] ++ self.assertEqual( len(list(req._cookieCache.values())), 1, "Bad number of cookies in response.") ++ cookie, a, kw = list(req._cookieCache.values())[0] + path = kw.get('path', None) + wanted = self.getGuardPath() + if wanted == '': + wanted = '/' +- self.failUnlessEqual(path, wanted) ++ self.assertEqual(path, wanted) + + + def test_defaultCookieDomain(self): +@@ -707,7 +707,7 @@ class GuardTestFuncs: + portal = self.createPortal() + channel = self.createGuard(portal) + request = channel.makeFakeRequest('%s/abc' % (self.getGuardPath(),)) +- cookie, args, kwargs = request._cookieCache.values()[0] ++ cookie, args, kwargs = list(request._cookieCache.values())[0] + self.assertEqual(kwargs['domain'], None) + + +@@ -729,7 +729,7 @@ class GuardTestFuncs: + channel = self.createGuard(portal) + + request = channel.makeFakeRequest('%s/abc' % (self.getGuardPath(),)) +- cookie, args, kwargs = request._cookieCache.values()[0] ++ cookie, args, kwargs = list(request._cookieCache.values())[0] + self.assertEqual(kwargs['domain'], 'example.com') + self.assertEqual(requests, [request]) + +@@ -740,8 +740,8 
@@ class GuardTestFuncs: + chan = self.createGuard(p) + + req = chan.makeFakeRequest('%s/__login__/sub/path?username=test&password=test' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), "Yes") +- self.assertEquals(req.path, '%s/sub/path' % self.getGuardPath()) ++ self.assertEqual(req.written.getvalue(), "Yes") ++ self.assertEqual(req.path, '%s/sub/path' % self.getGuardPath()) + + def testLoginExtraPath_withSlash(self): + p = self.createPortal() +@@ -749,8 +749,8 @@ class GuardTestFuncs: + chan = self.createGuard(p) + + req = chan.makeFakeRequest('%s/__login__/sub/path/?username=test&password=test' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), "Yes") +- self.assertEquals(req.path, '%s/sub/path/' % self.getGuardPath()) ++ self.assertEqual(req.written.getvalue(), "Yes") ++ self.assertEqual(req.path, '%s/sub/path/' % self.getGuardPath()) + + def testLogoutExtraPath(self): + p = self.createPortal() +@@ -758,12 +758,12 @@ class GuardTestFuncs: + chan = self.createGuard(p) + + req = chan.makeFakeRequest('%s/__login__?username=test&password=test' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), "Yes") ++ self.assertEqual(req.written.getvalue(), "Yes") + + # Log out + req2 = chan.makeFakeRequest("%s/__logout__/sub/path" % self.getGuardPath()).followRedirect() +- self.assertEquals(req2.written.getvalue(), "No") +- self.assertEquals(req2.path, '%s/sub/path' % self.getGuardPath()) ++ self.assertEqual(req2.written.getvalue(), "No") ++ self.assertEqual(req2.path, '%s/sub/path' % self.getGuardPath()) + + def testLogoutExtraPath_withSlash(self): + p = self.createPortal() +@@ -771,12 +771,12 @@ class GuardTestFuncs: + chan = self.createGuard(p) + + req = chan.makeFakeRequest('%s/__login__?username=test&password=test' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), "Yes") ++ self.assertEqual(req.written.getvalue(), "Yes") 
+ + # Log out + req2 = chan.makeFakeRequest("%s/__logout__/sub/path/" % self.getGuardPath()).followRedirect() +- self.assertEquals(req2.written.getvalue(), "No") +- self.assertEquals(req2.path, '%s/sub/path/' % self.getGuardPath()) ++ self.assertEqual(req2.written.getvalue(), "No") ++ self.assertEqual(req2.path, '%s/sub/path/' % self.getGuardPath()) + + def testGetLoggedInRoot_getLogin(self): + p = self.createPortal(realmFactory=GetLoggedInRealm) +@@ -784,7 +784,7 @@ class GuardTestFuncs: + chan = self.createGuard(p) + + req = chan.makeFakeRequest('%s/__login__?username=test&password=test' % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), "GetLoggedInAvatar") ++ self.assertEqual(req.written.getvalue(), "GetLoggedInAvatar") + + def testGetLoggedInRoot_httpAuthLogin(self): + +@@ -793,8 +793,8 @@ class GuardTestFuncs: + chan = self.createGuard(p) + for x in range(4): + req = chan.makeFakeRequest('%s/' % self.getGuardPath(), "test", "test") +- self.assertEquals(req.written.getvalue(), "GetLoggedInAvatar") +- self.assertEquals(len(self.sessions),1) ++ self.assertEqual(req.written.getvalue(), "GetLoggedInAvatar") ++ self.assertEqual(len(self.sessions),1) + + def testErrorPage_httpAuth(self): + """Failed HTTP Auth results in a 403 error.""" +@@ -806,12 +806,12 @@ class GuardTestFuncs: + req = chan.makeFakeRequest('%s' % self.getGuardPath(), + "test", "invalid-password") + self.assertFalse(req.responseHeaders.hasHeader('location')) +- self.assertEquals(req.code, 403) +- self.assertEquals(req.written.getvalue(), ++ self.assertEqual(req.code, 403) ++ self.assertEqual(req.written.getvalue(), + 'Forbidden' + +'

Forbidden

Request was forbidden.' + +'') +- self.assertEquals(req.path, self.getGuardPath()) ++ self.assertEqual(req.path, self.getGuardPath()) + + def testErrorPage_httpAuth_deep(self): + """Failed HTTP Auth results in a 403 error.""" +@@ -823,12 +823,12 @@ class GuardTestFuncs: + req = chan.makeFakeRequest('%s/quux/thud' % self.getGuardPath(), + "test", "invalid-password") + self.assertFalse(req.responseHeaders.hasHeader('location')) +- self.assertEquals(req.code, 403) +- self.assertEquals(req.written.getvalue(), ++ self.assertEqual(req.code, 403) ++ self.assertEqual(req.written.getvalue(), + 'Forbidden' + +'

Forbidden

Request was forbidden.' + +'') +- self.assertEquals(req.path, '%s/quux/thud' % self.getGuardPath()) ++ self.assertEqual(req.path, '%s/quux/thud' % self.getGuardPath()) + + def testErrorPage_getLogin(self): + """Failed normal login results in anonymous view of the same page.""" +@@ -840,11 +840,11 @@ class GuardTestFuncs: + req = chan.makeFakeRequest( + '%s/__login__?username=test&password=invalid-password' + % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), 'No') ++ self.assertEqual(req.written.getvalue(), 'No') + wanted = self.getGuardPath() + if wanted == '': + wanted = '/' +- self.assertEquals(req.path, wanted) ++ self.assertEqual(req.path, wanted) + + def testErrorPage_getLogin_deep(self): + """Failed normal login results in anonymous view of the same page.""" +@@ -856,8 +856,8 @@ class GuardTestFuncs: + req = chan.makeFakeRequest( + '%s/__login__/quux/thud?username=test&password=invalid-password' + % self.getGuardPath()).followAllRedirects() +- self.assertEquals(req.written.getvalue(), 'No') +- self.assertEquals(req.path, '%s/quux/thud' % self.getGuardPath()) ++ self.assertEqual(req.written.getvalue(), 'No') ++ self.assertEqual(req.path, '%s/quux/thud' % self.getGuardPath()) + + + class ParentPage(rend.Page): +--- nevow/test/test_howtolistings.py.orig 2015-10-20 22:44:10 UTC ++++ nevow/test/test_howtolistings.py +@@ -13,6 +13,7 @@ from nevow.testutil import renderLivePage, JavaScriptT + from nevow.athena import jsDeps, expose + + from nevow import plugins ++import importlib + + + class ExampleTestBase(object): +@@ -53,7 +54,7 @@ class ExampleTestBase(object): + jsDeps._loadPlugins = True + # Even more horrible! nevow.plugins.__path__ needs to be recomputed + # each time for the new value of sys.path. 
+- reload(plugins) ++ importlib.reload(plugins) + + + def tearDown(self): +@@ -64,7 +65,7 @@ class ExampleTestBase(object): + sys.modules.clear() + sys.modules.update(self.originalModules) + sys.path[:] = self.originalPath +- reload(plugins) ++ importlib.reload(plugins) + + + +@@ -83,7 +84,7 @@ class ExampleJavaScriptTestCase(JavaScriptTestCase): + base.examplePath = self.examplePath + try: + base.setUp() +- except SkipTest, e: ++ except SkipTest as e: + result.startTest(self) + result.addSkip(self, str(e)) + result.stopTest(self) +@@ -152,8 +153,8 @@ class Echo00(ExampleTestBase, TestCase): + eb = EchoElement() + echoed = [] + eb.callRemote = lambda method, message: echoed.append((method, message)) +- eb.say(u'HELLO... Hello... hello...') +- self.assertEquals(echoed, [('addText', u'HELLO... Hello... hello...')]) ++ eb.say('HELLO... Hello... hello...') ++ self.assertEqual(echoed, [('addText', 'HELLO... Hello... hello...')]) + + + +@@ -205,8 +206,8 @@ class RenderAndChat01(ExampleTestBase, TestCase): + from chatthing.chatterbox import ChatterElement, ChatRoom + cb = ChatterElement(ChatRoom()) + setUsername = expose.get(cb, 'setUsername') +- setUsername(u'jethro') +- self.assertIdentical(u'jethro', cb.username) ++ setUsername('jethro') ++ self.assertIdentical('jethro', cb.username) + + + def test_loginThenWall(self): +@@ -220,14 +221,14 @@ class RenderAndChat01(ExampleTestBase, TestCase): + cr = ChatRoom() + user1 = cr.makeChatter() + user1.wall = lambda msg: jethroHeard.append(msg) +- user1.setUsername(u'jethro') ++ user1.setUsername('jethro') + user2 = cr.makeChatter() + user2.wall = lambda msg: cletusHeard.append(msg) +- user2.setUsername(u'cletus') +- self.assertEquals(jethroHeard, +- [u' * user jethro has joined the room', +- u' * user cletus has joined the room']) +- self.assertEquals(cletusHeard, [u' * user cletus has joined the room']) ++ user2.setUsername('cletus') ++ self.assertEqual(jethroHeard, ++ [' * user jethro has joined the room', ++ ' * user cletus 
has joined the room']) ++ self.assertEqual(cletusHeard, [' * user cletus has joined the room']) + + + def test_sayThenHear(self): +@@ -239,18 +240,18 @@ class RenderAndChat01(ExampleTestBase, TestCase): + cr = ChatRoom() + user1 = cr.makeChatter() + user1.wall = lambda msg: msg +- user1.setUsername(u'jethro') ++ user1.setUsername('jethro') + user2 = cr.makeChatter() + user2.wall = lambda msg: msg +- user2.setUsername(u'cletus') ++ user2.setUsername('cletus') + jethroHeard = [] + cletusHeard = [] + user1.hear = lambda who, what: jethroHeard.append((who,what)) + user2.hear = lambda who, what: cletusHeard.append((who,what)) + say = expose.get(user1, 'say') +- say(u'Hey, Cletus!') +- self.assertEquals(jethroHeard, cletusHeard) +- self.assertEquals(cletusHeard, [(u'jethro', u'Hey, Cletus!')]) ++ say('Hey, Cletus!') ++ self.assertEqual(jethroHeard, cletusHeard) ++ self.assertEqual(cletusHeard, [('jethro', 'Hey, Cletus!')]) + + + def test_wallTellsClient(self): +@@ -262,8 +263,8 @@ class RenderAndChat01(ExampleTestBase, TestCase): + cb = ChatRoom().makeChatter() + heard = [] + cb.callRemote = lambda method, msg: heard.append((method, msg)) +- cb.wall(u'Message for everyone...') +- self.assertEquals(heard, [('displayMessage', u'Message for everyone...')]) ++ cb.wall('Message for everyone...') ++ self.assertEqual(heard, [('displayMessage', 'Message for everyone...')]) + + def test_hearTellsClient(self): + """ +@@ -274,6 +275,6 @@ class RenderAndChat01(ExampleTestBase, TestCase): + cb = ChatRoom().makeChatter() + heard = [] + cb.callRemote = lambda method, who, what: heard.append((method, who, what)) +- cb.hear(u'Hello', u'Chat') +- self.assertEquals(heard, [('displayUserMessage', u'Hello', u'Chat')]) ++ cb.hear('Hello', 'Chat') ++ self.assertEqual(heard, [('displayUserMessage', 'Hello', 'Chat')]) + +--- nevow/test/test_i18n.py.orig 2015-10-20 22:44:10 UTC ++++ nevow/test/test_i18n.py +@@ -1,7 +1,7 @@ + from zope.interface import implements + + from twisted.trial import 
unittest +-from cStringIO import StringIO ++from io import StringIO + from nevow import inevow, flat, context, tags, loaders, rend + from nevow import i18n + from nevow.testutil import FakeRequest +@@ -11,7 +11,7 @@ def mockTranslator(s, languages=None, domain=None): + if domain is not None: + args['domain'] = domain + return 'MOCK(%s)[%s]' % (', '.join(['%s=%r' % (k,v) +- for k,v in args.items()]), ++ for k,v in list(args.items())]), + s) + + class Misc(unittest.TestCase): +@@ -21,13 +21,13 @@ class Misc(unittest.TestCase): + def test_simple_flat(self): + s = i18n._('foo') + r = flat.ten.flatten(s, None) +- self.assertEquals(r, 'foo') ++ self.assertEqual(r, 'foo') + + def test_translator(self): + _ = i18n.Translator(translator=mockTranslator) + s = _('foo') + r = flat.ten.flatten(s, None) +- self.assertEquals(r, 'MOCK()[foo]') ++ self.assertEqual(r, 'MOCK()[foo]') + + class Config(unittest.TestCase): + def test_remember(self): +@@ -41,13 +41,13 @@ class Domain(unittest.TestCase): + domain='bar') + s = _('foo') + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "MOCK(domain='bar')[foo]") ++ self.assertEqual(r, "MOCK(domain='bar')[foo]") + + def test_runTime(self): + _ = i18n.Translator(translator=mockTranslator) + s = _('foo', domain='baz') + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "MOCK(domain='baz')[foo]") ++ self.assertEqual(r, "MOCK(domain='baz')[foo]") + + def test_context(self): + _ = i18n.Translator(translator=mockTranslator) +@@ -56,7 +56,7 @@ class Domain(unittest.TestCase): + ctx.remember(cfg) + s = _('foo') + r = flat.ten.flatten(s, ctx) +- self.assertEquals(r, "MOCK(domain='thud')[foo]") ++ self.assertEqual(r, "MOCK(domain='thud')[foo]") + + def test_runTime_beats_all(self): + _ = i18n.Translator(translator=mockTranslator, +@@ -66,7 +66,7 @@ class Domain(unittest.TestCase): + ctx.remember(cfg) + s = _('foo', domain='baz') + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "MOCK(domain='baz')[foo]") ++ self.assertEqual(r, 
"MOCK(domain='baz')[foo]") + + + def test_classInit_beats_context(self): +@@ -77,14 +77,14 @@ class Domain(unittest.TestCase): + ctx.remember(cfg) + s = _('foo') + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "MOCK(domain='baz')[foo]") ++ self.assertEqual(r, "MOCK(domain='baz')[foo]") + + class Format(unittest.TestCase): + def test_simple(self): + _ = i18n.Translator(translator=mockTranslator) + s = _('foo %s') % 'bar' + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "MOCK()[foo bar]") ++ self.assertEqual(r, "MOCK()[foo bar]") + + def test_multiple(self): + _ = i18n.Translator(translator=mockTranslator) +@@ -92,7 +92,7 @@ class Format(unittest.TestCase): + s = s % 'bar %s' + s = s % 'baz' + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "MOCK()[foo bar baz]") ++ self.assertEqual(r, "MOCK()[foo bar baz]") + + + +@@ -101,7 +101,7 @@ class Languages(unittest.TestCase): + request = FakeRequest(headers={}) + ctx = context.RequestContext(tag=request) + r = inevow.ILanguages(ctx) +- self.assertEquals(r, []) ++ self.assertEqual(r, []) + + def test_oneLanguage(self): + request = FakeRequest(headers={ +@@ -109,7 +109,7 @@ class Languages(unittest.TestCase): + }) + ctx = context.RequestContext(tag=request) + r = inevow.ILanguages(ctx) +- self.assertEquals(r, ['fo']) ++ self.assertEqual(r, ['fo']) + + def test_multipleLanguages(self): + request = FakeRequest(headers={ +@@ -117,7 +117,7 @@ class Languages(unittest.TestCase): + }) + ctx = context.RequestContext(tag=request) + r = inevow.ILanguages(ctx) +- self.assertEquals(r, ['fo', 'ba', 'th']) ++ self.assertEqual(r, ['fo', 'ba', 'th']) + + def test_quality_simple(self): + request = FakeRequest(headers={ +@@ -125,7 +125,7 @@ class Languages(unittest.TestCase): + }) + ctx = context.RequestContext(tag=request) + r = inevow.ILanguages(ctx) +- self.assertEquals(r, ['fo']) ++ self.assertEqual(r, ['fo']) + + def test_quality_sort(self): + request = FakeRequest(headers={ +@@ -133,7 +133,7 @@ class 
Languages(unittest.TestCase): + }) + ctx = context.RequestContext(tag=request) + r = inevow.ILanguages(ctx) +- self.assertEquals(r, ['xy', 'fo', 'ba']) ++ self.assertEqual(r, ['xy', 'fo', 'ba']) + + def test_quality_invalid_notQ(self): + request = FakeRequest(headers={ +@@ -141,7 +141,7 @@ class Languages(unittest.TestCase): + }) + ctx = context.RequestContext(tag=request) + r = inevow.ILanguages(ctx) +- self.assertEquals(r, ['ba', 'fo']) ++ self.assertEqual(r, ['ba', 'fo']) + + def test_quality_invalid_notFloat(self): + request = FakeRequest(headers={ +@@ -149,7 +149,7 @@ class Languages(unittest.TestCase): + }) + ctx = context.RequestContext(tag=request) + r = inevow.ILanguages(ctx) +- self.assertEquals(r, ['ba', 'fo']) ++ self.assertEqual(r, ['ba', 'fo']) + + class Render(unittest.TestCase): + def makePage(self, content): +@@ -172,15 +172,15 @@ class Render(unittest.TestCase): + + def test_empty(self): + return self.makePage(['']).addCallback( +- lambda r: self.assertEquals(r, 'MOCK()[]')) ++ lambda r: self.assertEqual(r, 'MOCK()[]')) + + def test_simple(self): + return self.makePage(['foo']).addCallback( +- lambda r: self.assertEquals(r, 'MOCK()[foo]')) ++ lambda r: self.assertEqual(r, 'MOCK()[foo]')) + + def test_stan(self): + return self.makePage([tags.p['You should really avoid tags in i18n input.']]).addCallback( +- lambda r: self.assertEquals(r, 'MOCK()[

You should really avoid tags in i18n input.

]')) ++ lambda r: self.assertEqual(r, 'MOCK()[

You should really avoid tags in i18n input.

]')) + + class InterpolateTests: + def test_mod_string(self): +@@ -188,7 +188,7 @@ class InterpolateTests: + 'foo bar') + + def test_mod_unicode(self): +- self.check('foo %s', u'bar', ++ self.check('foo %s', 'bar', + 'foo bar') + + def test_mod_int(self): +@@ -255,7 +255,7 @@ class InterpolateMixin: + self._ = i18n.Translator(translator=mockTranslator) + + def mangle(self, s): +- raise NotImplementedError, 'override mangle somewhere' ++ raise NotImplementedError('override mangle somewhere') + + def check(self, fmt, args, *wants): + got = self.mangle(self._(fmt) % args) +@@ -296,24 +296,24 @@ class UNGettext(unittest.TestCase): + def test_simple_flat_one(self): + s = i18n.ungettext('%d foo', '%d foos', 1) + r = flat.ten.flatten(s, None) +- self.assertEquals(r, '%d foo') ++ self.assertEqual(r, '%d foo') + + def test_simple_flat_many(self): + s = i18n.ungettext('%d foo', '%d foos', 42) + r = flat.ten.flatten(s, None) +- self.assertEquals(r, '%d foos') ++ self.assertEqual(r, '%d foos') + + def test_simple_flat_many(self): + s = i18n.ungettext('%d foo', '%d foos', 42) + r = flat.ten.flatten(s, None) +- self.assertEquals(r, '%d foos') ++ self.assertEqual(r, '%d foos') + + def test_format_one(self): + s = i18n.ungettext('%d foo', '%d foos', 1) % 1 + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "1 foo") ++ self.assertEqual(r, "1 foo") + + def test_format_many(self): + s = i18n.ungettext('%d foo', '%d foos', 42) % 42 + r = flat.ten.flatten(s, None) +- self.assertEquals(r, "42 foos") ++ self.assertEqual(r, "42 foos") +--- nevow/test/test_newflat.py.orig 2016-05-08 19:28:50 UTC ++++ nevow/test/test_newflat.py +@@ -5,7 +5,7 @@ + Tests for L{nevow._flat}. + """ + +-import sys, traceback, StringIO ++import sys, traceback, io + + from zope.interface import implements + +@@ -34,7 +34,7 @@ from nevow.context import WovenContext + # lambda to avoid adding anything else to this namespace. 
The result will + # be a string which agrees with the one the traceback module will put into a + # traceback for frames associated with functions defined in this file. +-HERE = (lambda: None).func_code.co_filename ++HERE = (lambda: None).__code__.co_filename + + + class TrivialRenderable(object): +@@ -118,7 +118,7 @@ class FlattenTests(TestCase, FlattenMixin): + """ + Helper to get a string from L{flatten}. + """ +- s = StringIO.StringIO() ++ s = io.StringIO() + for _ in flatten(request, s.write, root, inAttribute, inXML): + pass + return s.getvalue() +@@ -193,8 +193,8 @@ class FlattenTests(TestCase, FlattenMixin): + An instance of L{unicode} is flattened to the UTF-8 representation of + itself. + """ +- self.assertStringEqual(self.flatten(u'bytes<>&"\0'), 'bytes<>&"\0') +- unich = u"\N{LATIN CAPITAL LETTER E WITH GRAVE}" ++ self.assertStringEqual(self.flatten('bytes<>&"\0'), 'bytes<>&"\0') ++ unich = "\N{LATIN CAPITAL LETTER E WITH GRAVE}" + self.assertStringEqual(self.flatten(unich), unich.encode('utf-8')) + + +@@ -203,7 +203,7 @@ class FlattenTests(TestCase, FlattenMixin): + An L{xml} instance is flattened to the UTF-8 representation of itself. + """ + self.assertStringEqual(self.flatten(xml("foo")), "foo") +- unich = u"\N{LATIN CAPITAL LETTER E WITH GRAVE}" ++ unich = "\N{LATIN CAPITAL LETTER E WITH GRAVE}" + self.assertStringEqual(self.flatten(xml(unich)), unich.encode('utf-8')) + + +@@ -303,8 +303,8 @@ class FlattenTests(TestCase, FlattenMixin): + A L{Tag} with a C{tagName} attribute which is C{unicode} instead of + C{str} is flattened to an XML representation. + """ +- self.assertStringEqual(self.flatten(Tag(u'div')), "
") +- self.assertStringEqual(self.flatten(Tag(u'div')['']), "
") ++ self.assertStringEqual(self.flatten(Tag('div')), "
") ++ self.assertStringEqual(self.flatten(Tag('div')['']), "
") + + + def test_unicodeAttributeName(self): +@@ -313,7 +313,7 @@ class FlattenTests(TestCase, FlattenMixin): + is flattened to an XML representation. + """ + self.assertStringEqual( +- self.flatten(Tag(u'div', {u'foo': 'bar'})), '
') ++ self.flatten(Tag('div', {'foo': 'bar'})), '
') + + + def test_stringTagAttributes(self): +@@ -820,7 +820,7 @@ class FlattenTests(TestCase, FlattenMixin): + significantly greater than the Python maximum recursion limit. + """ + obj = ["foo"] +- for i in xrange(1000): ++ for i in range(1000): + obj = [obj] + self._nestingTest(obj, "foo") + +@@ -831,7 +831,7 @@ class FlattenTests(TestCase, FlattenMixin): + significantly greater than the Python maximum recursion limit. + """ + tag = div()[slot("foo-0")] +- for i in xrange(1000): ++ for i in range(1000): + tag.fillSlots("foo-" + str(i), slot("foo-" + str(i + 1))) + tag.fillSlots("foo-1000", "bar") + self._nestingTest(tag, "
bar
") +@@ -844,7 +844,7 @@ class FlattenTests(TestCase, FlattenMixin): + """ + n = 1000 + tag = div["foo"] +- for i in xrange(n - 1): ++ for i in range(n - 1): + tag = div[tag] + self._nestingTest(tag, "
" * n + "foo" + "
" * n) + +@@ -855,7 +855,7 @@ class FlattenTests(TestCase, FlattenMixin): + nesting significantly greater than the Python maximum recursion limit. + """ + obj = TrivialRenderable("foo") +- for i in xrange(1000): ++ for i in range(1000): + obj = TrivialRenderable(obj) + self._nestingTest(obj, "foo") + +@@ -971,13 +971,13 @@ class FlattenerErrorTests(TestCase): + """ + self.assertEqual( + str(FlattenerError( +- RuntimeError("reason"), [u'abc\N{SNOWMAN}xyz'], [])), ++ RuntimeError("reason"), ['abc\N{SNOWMAN}xyz'], [])), + "Exception while flattening:\n" + " u'abc\\u2603xyz'\n" # Codepoint for SNOWMAN + "RuntimeError: reason\n") + self.assertEqual( + str(FlattenerError( +- RuntimeError("reason"), [u'01234567\N{SNOWMAN}9' * 10], ++ RuntimeError("reason"), ['01234567\N{SNOWMAN}9' * 10], + [])), + "Exception while flattening:\n" + " u'01234567\\u2603901234567\\u26039<...>01234567\\u2603901234567" +@@ -1048,7 +1048,7 @@ class FlattenerErrorTests(TestCase): + + try: + f() +- except RuntimeError, exc: ++ except RuntimeError as exc: + # Get the traceback, minus the info for *this* frame + tbinfo = traceback.extract_tb(sys.exc_info()[2])[1:] + else: +@@ -1062,8 +1062,8 @@ class FlattenerErrorTests(TestCase): + " File \"%s\", line %d, in g\n" + " raise RuntimeError(\"reason\")\n" + "RuntimeError: reason\n" % ( +- HERE, f.func_code.co_firstlineno + 1, +- HERE, g.func_code.co_firstlineno + 1)) ++ HERE, f.__code__.co_firstlineno + 1, ++ HERE, g.__code__.co_firstlineno + 1)) + + + +@@ -1234,8 +1234,8 @@ class DeferflattenTests(TestCase, FlattenMixin): + frames allowed by the Python recursion limit succeeds if all the + L{Deferred}s have results already. 
+ """ +- results = [str(i) for i in xrange(1000)] +- deferreds = map(succeed, results) ++ results = [str(i) for i in range(1000)] ++ deferreds = list(map(succeed, results)) + limit = sys.getrecursionlimit() + sys.setrecursionlimit(100) + try: +--- nevow/test/test_passobj.py.orig 2015-10-20 22:44:10 UTC ++++ nevow/test/test_passobj.py +@@ -65,7 +65,7 @@ class ObjectTester: + ] + + def someMethod(self, one, two): +- print "ONE TWO", `one`, `two` ++ print("ONE TWO", repr(one), repr(two)) + + def frobber(self, frobber, frobee): + return frobber.frobazz(frobee) +@@ -193,7 +193,7 @@ class AnotherTest: + return "Breakpoint in file %s at line %s" % (self.fn, self.ln) + + breakpoints = BreakpointRemover() +- for fn in debugInstance.breaks.keys(): ++ for fn in list(debugInstance.breaks.keys()): + for lineno in debugInstance.breaks[fn]: + breakpoints.append(BP(fn, lineno)) + return breakpoints +--- nevow/test/test_url.py.orig 2016-02-16 18:41:30 UTC ++++ nevow/test/test_url.py +@@ -5,7 +5,7 @@ + Tests for L{nevow.url}. 
+ """ + +-import urlparse, urllib ++import urllib.parse, urllib.request, urllib.parse, urllib.error + + from nevow import context, url, inevow, util, loaders + from nevow import tags +@@ -88,7 +88,7 @@ class _IncompatibleSignatureURL(url.URL): + class TestURL(TestCase): + def test_fromString(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals(theurl, str(urlpath)) ++ self.assertEqual(theurl, str(urlpath)) + + def test_roundtrip(self): + tests = ( +@@ -108,34 +108,34 @@ class TestURL(TestCase): + ) + for test in tests: + result = str(url.URL.fromString(test)) +- self.assertEquals(test, result) ++ self.assertEqual(test, result) + + def test_fromRequest(self): + request = FakeRequest(uri='/a/nice/path/?zot=23&zut', + currentSegments=["a", "nice", "path", ""], + headers={'host': 'www.foo.com:80'}) + urlpath = url.URL.fromRequest(request) +- self.assertEquals(theurl, str(urlpath)) ++ self.assertEqual(theurl, str(urlpath)) + + def test_fromContext(self): + + r = FakeRequest(uri='/a/b/c') + urlpath = url.URL.fromContext(context.RequestContext(tag=r)) +- self.assertEquals('http://localhost/', str(urlpath)) ++ self.assertEqual('http://localhost/', str(urlpath)) + + r.prepath = ['a'] + urlpath = url.URL.fromContext(context.RequestContext(tag=r)) +- self.assertEquals('http://localhost/a', str(urlpath)) ++ self.assertEqual('http://localhost/a', str(urlpath)) + + r = FakeRequest(uri='/a/b/c?foo=bar') + r.prepath = ['a','b'] + urlpath = url.URL.fromContext(context.RequestContext(tag=r)) +- self.assertEquals('http://localhost/a/b?foo=bar', str(urlpath)) ++ self.assertEqual('http://localhost/a/b?foo=bar', str(urlpath)) + + def test_equality(self): + urlpath = url.URL.fromString(theurl) +- self.failUnlessEqual(urlpath, url.URL.fromString(theurl)) +- self.failIfEqual(urlpath, url.URL.fromString('ftp://www.anotherinvaliddomain.com/foo/bar/baz/?zot=21&zut')) ++ self.assertEqual(urlpath, url.URL.fromString(theurl)) ++ self.assertNotEqual(urlpath, 
url.URL.fromString('ftp://www.anotherinvaliddomain.com/foo/bar/baz/?zot=21&zut')) + + + def test_fragmentEquality(self): +@@ -148,7 +148,7 @@ class TestURL(TestCase): + + def test_parent(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals("http://www.foo.com:80/a/nice/?zot=23&zut", ++ self.assertEqual("http://www.foo.com:80/a/nice/?zot=23&zut", + str(urlpath.parent())) + + +@@ -167,98 +167,98 @@ class TestURL(TestCase): + + def test_parentdir(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals("http://www.foo.com:80/a/nice/?zot=23&zut", ++ self.assertEqual("http://www.foo.com:80/a/nice/?zot=23&zut", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a') +- self.assertEquals("http://www.foo.com/", ++ self.assertEqual("http://www.foo.com/", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a/') +- self.assertEquals("http://www.foo.com/", ++ self.assertEqual("http://www.foo.com/", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a/b') +- self.assertEquals("http://www.foo.com/", ++ self.assertEqual("http://www.foo.com/", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a/b/') +- self.assertEquals("http://www.foo.com/a/", ++ self.assertEqual("http://www.foo.com/a/", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a/b/c') +- self.assertEquals("http://www.foo.com/a/", ++ self.assertEqual("http://www.foo.com/a/", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a/b/c/') +- self.assertEquals("http://www.foo.com/a/b/", ++ self.assertEqual("http://www.foo.com/a/b/", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a/b/c/d') +- self.assertEquals("http://www.foo.com/a/b/", ++ self.assertEqual("http://www.foo.com/a/b/", + str(urlpath.parentdir())) + urlpath = url.URL.fromString('http://www.foo.com/a/b/c/d/') +- 
self.assertEquals("http://www.foo.com/a/b/c/", ++ self.assertEqual("http://www.foo.com/a/b/c/", + str(urlpath.parentdir())) + + def test_parent_root(self): + urlpath = url.URL.fromString('http://www.foo.com/') +- self.assertEquals("http://www.foo.com/", ++ self.assertEqual("http://www.foo.com/", + str(urlpath.parentdir())) +- self.assertEquals("http://www.foo.com/", ++ self.assertEqual("http://www.foo.com/", + str(urlpath.parentdir().parentdir())) + + def test_child(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals("http://www.foo.com:80/a/nice/path/gong?zot=23&zut", ++ self.assertEqual("http://www.foo.com:80/a/nice/path/gong?zot=23&zut", + str(urlpath.child('gong'))) +- self.assertEquals("http://www.foo.com:80/a/nice/path/gong%2F?zot=23&zut", ++ self.assertEqual("http://www.foo.com:80/a/nice/path/gong%2F?zot=23&zut", + str(urlpath.child('gong/'))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/gong%2Fdouble?zot=23&zut", + str(urlpath.child('gong/double'))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/gong%2Fdouble%2F?zot=23&zut", + str(urlpath.child('gong/double/'))) + + def test_child_init_tuple(self): +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com/a/b/c", + str(url.URL(netloc="www.foo.com", + pathsegs=['a', 'b']).child("c"))) + + def test_child_init_root(self): +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com/c", + str(url.URL(netloc="www.foo.com").child("c"))) + + def test_sibling(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/sister?zot=23&zut", + str(urlpath.sibling('sister'))) + # use an url without trailing '/' to check child removal + theurl2 = "http://www.foo.com:80/a/nice/path?zot=23&zut" + urlpath = url.URL.fromString(theurl2) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/sister?zot=23&zut", + str(urlpath.sibling('sister'))) + + 
def test_curdir(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals(theurl, str(urlpath)) ++ self.assertEqual(theurl, str(urlpath)) + # use an url without trailing '/' to check object removal + theurl2 = "http://www.foo.com:80/a/nice/path?zot=23&zut" + urlpath = url.URL.fromString(theurl2) +- self.assertEquals("http://www.foo.com:80/a/nice/?zot=23&zut", ++ self.assertEqual("http://www.foo.com:80/a/nice/?zot=23&zut", + str(urlpath.curdir())) + + def test_click(self): + urlpath = url.URL.fromString(theurl) + # a null uri should be valid (return here) +- self.assertEquals("http://www.foo.com:80/a/nice/path/?zot=23&zut", ++ self.assertEqual("http://www.foo.com:80/a/nice/path/?zot=23&zut", + str(urlpath.click(""))) + # a simple relative path remove the query +- self.assertEquals("http://www.foo.com:80/a/nice/path/click", ++ self.assertEqual("http://www.foo.com:80/a/nice/path/click", + str(urlpath.click("click"))) + # an absolute path replace path and query +- self.assertEquals("http://www.foo.com:80/click", ++ self.assertEqual("http://www.foo.com:80/click", + str(urlpath.click("/click"))) + # replace just the query +- self.assertEquals("http://www.foo.com:80/a/nice/path/?burp", ++ self.assertEqual("http://www.foo.com:80/a/nice/path/?burp", + str(urlpath.click("?burp"))) + # one full url to another should not generate '//' between netloc and pathsegs + self.failIfIn("//foobar", str(urlpath.click('http://www.foo.com:80/foobar'))) +@@ -266,13 +266,13 @@ class TestURL(TestCase): + # from a url with no query clicking a url with a query, + # the query should be handled properly + u = url.URL.fromString('http://www.foo.com:80/me/noquery') +- self.failUnlessEqual('http://www.foo.com:80/me/17?spam=158', ++ self.assertEqual('http://www.foo.com:80/me/17?spam=158', + str(u.click('/me/17?spam=158'))) + + # Check that everything from the path onward is removed when the click link + # has no path. 
+ u = url.URL.fromString('http://localhost/foo?abc=def') +- self.failUnlessEqual(str(u.click('http://www.python.org')), 'http://www.python.org/') ++ self.assertEqual(str(u.click('http://www.python.org')), 'http://www.python.org/') + + + def test_cloneUnchanged(self): +@@ -383,146 +383,146 @@ class TestURL(TestCase): + ['http://localhost/a/b/c', 'd//e', 'http://localhost/a/b/d//e'], + ] + for start, click, result in tests: +- self.assertEquals( ++ self.assertEqual( + str(url.URL.fromString(start).click(click)), + result + ) + + def test_add(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut&burp", + str(urlpath.add("burp"))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut&burp=xxx", + str(urlpath.add("burp", "xxx"))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut&burp=xxx&zing", + str(urlpath.add("burp", "xxx").add("zing"))) + # note the inversion! 
+- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut&zing&burp=xxx", + str(urlpath.add("zing").add("burp", "xxx"))) + # note the two values for the same name +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut&burp=xxx&zot=32", + str(urlpath.add("burp", "xxx").add("zot", 32))) + + def test_add_noquery(self): + # fromString is a different code path, test them both +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?foo=bar", + str(url.URL.fromString("http://www.foo.com:80/a/nice/path/") + .add("foo", "bar"))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com/?foo=bar", + str(url.URL(netloc="www.foo.com").add("foo", "bar"))) + + def test_replace(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=32&zut", + str(urlpath.replace("zot", 32))) + # replace name without value with name/value and vice-versa +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot&zut=itworked", + str(urlpath.replace("zot").replace("zut", "itworked"))) + # Q: what happens when the query has two values and we replace? 
+ # A: we replace both values with a single one +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=32&zut", + str(urlpath.add("zot", "xxx").replace("zot", 32))) + + def test_fragment(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut#hiboy", + str(urlpath.anchor("hiboy"))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut", + str(urlpath.anchor())) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23&zut", + str(urlpath.anchor(''))) + + def test_clear(self): + urlpath = url.URL.fromString(theurl) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zut", + str(urlpath.clear("zot"))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zot=23", + str(urlpath.clear("zut"))) + # something stranger, query with two values, both should get cleared +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/?zut", + str(urlpath.add("zot", 1971).clear("zot"))) + # two ways to clear the whole query +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/", + str(urlpath.clear("zut").clear("zot"))) +- self.assertEquals( ++ self.assertEqual( + "http://www.foo.com:80/a/nice/path/", + str(urlpath.clear())) + + def test_secure(self): +- self.assertEquals(str(url.URL.fromString('http://localhost/').secure()), 'https://localhost/') +- self.assertEquals(str(url.URL.fromString('http://localhost/').secure(True)), 'https://localhost/') +- self.assertEquals(str(url.URL.fromString('https://localhost/').secure()), 'https://localhost/') +- self.assertEquals(str(url.URL.fromString('https://localhost/').secure(False)), 'http://localhost/') +- self.assertEquals(str(url.URL.fromString('http://localhost/').secure(False)), 'http://localhost/') +- 
self.assertEquals(str(url.URL.fromString('http://localhost/foo').secure()), 'https://localhost/foo') +- self.assertEquals(str(url.URL.fromString('http://localhost/foo?bar=1').secure()), 'https://localhost/foo?bar=1') +- self.assertEquals(str(url.URL.fromString('http://localhost/').secure(port=443)), 'https://localhost/') +- self.assertEquals(str(url.URL.fromString('http://localhost:8080/').secure(port=8443)), 'https://localhost:8443/') +- self.assertEquals(str(url.URL.fromString('https://localhost:8443/').secure(False, 8080)), 'http://localhost:8080/') ++ self.assertEqual(str(url.URL.fromString('http://localhost/').secure()), 'https://localhost/') ++ self.assertEqual(str(url.URL.fromString('http://localhost/').secure(True)), 'https://localhost/') ++ self.assertEqual(str(url.URL.fromString('https://localhost/').secure()), 'https://localhost/') ++ self.assertEqual(str(url.URL.fromString('https://localhost/').secure(False)), 'http://localhost/') ++ self.assertEqual(str(url.URL.fromString('http://localhost/').secure(False)), 'http://localhost/') ++ self.assertEqual(str(url.URL.fromString('http://localhost/foo').secure()), 'https://localhost/foo') ++ self.assertEqual(str(url.URL.fromString('http://localhost/foo?bar=1').secure()), 'https://localhost/foo?bar=1') ++ self.assertEqual(str(url.URL.fromString('http://localhost/').secure(port=443)), 'https://localhost/') ++ self.assertEqual(str(url.URL.fromString('http://localhost:8080/').secure(port=8443)), 'https://localhost:8443/') ++ self.assertEqual(str(url.URL.fromString('https://localhost:8443/').secure(False, 8080)), 'http://localhost:8080/') + + + def test_eq_same(self): + u = url.URL.fromString('http://localhost/') +- self.failUnless(u == u, "%r != itself" % u) ++ self.assertTrue(u == u, "%r != itself" % u) + + def test_eq_similar(self): + u1 = url.URL.fromString('http://localhost/') + u2 = url.URL.fromString('http://localhost/') +- self.failUnless(u1 == u2, "%r != %r" % (u1, u2)) ++ self.assertTrue(u1 == u2, "%r != 
%r" % (u1, u2)) + + def test_eq_different(self): + u1 = url.URL.fromString('http://localhost/a') + u2 = url.URL.fromString('http://localhost/b') +- self.failIf(u1 == u2, "%r != %r" % (u1, u2)) ++ self.assertFalse(u1 == u2, "%r != %r" % (u1, u2)) + + def test_eq_apples_vs_oranges(self): + u = url.URL.fromString('http://localhost/') +- self.failIf(u == 42, "URL must not equal a number.") +- self.failIf(u == object(), "URL must not equal an object.") ++ self.assertFalse(u == 42, "URL must not equal a number.") ++ self.assertFalse(u == object(), "URL must not equal an object.") + + def test_ne_same(self): + u = url.URL.fromString('http://localhost/') +- self.failIf(u != u, "%r == itself" % u) ++ self.assertFalse(u != u, "%r == itself" % u) + + def test_ne_similar(self): + u1 = url.URL.fromString('http://localhost/') + u2 = url.URL.fromString('http://localhost/') +- self.failIf(u1 != u2, "%r == %r" % (u1, u2)) ++ self.assertFalse(u1 != u2, "%r == %r" % (u1, u2)) + + def test_ne_different(self): + u1 = url.URL.fromString('http://localhost/a') + u2 = url.URL.fromString('http://localhost/b') +- self.failUnless(u1 != u2, "%r == %r" % (u1, u2)) ++ self.assertTrue(u1 != u2, "%r == %r" % (u1, u2)) + + def test_ne_apples_vs_oranges(self): + u = url.URL.fromString('http://localhost/') +- self.failUnless(u != 42, "URL must differ from a number.") +- self.failUnless(u != object(), "URL must be differ from an object.") ++ self.assertTrue(u != 42, "URL must differ from a number.") ++ self.assertTrue(u != object(), "URL must be differ from an object.") + + def test_parseEqualInParamValue(self): + u = url.URL.fromString('http://localhost/?=x=x=x') +- self.failUnless(u.query == ['=x=x=x']) +- self.failUnless(str(u) == 'http://localhost/?=x%3Dx%3Dx') ++ self.assertTrue(u.query == ['=x=x=x']) ++ self.assertTrue(str(u) == 'http://localhost/?=x%3Dx%3Dx') + u = url.URL.fromString('http://localhost/?foo=x=x=x&bar=y') +- self.failUnless(u.query == ['foo=x=x=x', 'bar=y']) +- 
self.failUnless(str(u) == 'http://localhost/?foo=x%3Dx%3Dx&bar=y') ++ self.assertTrue(u.query == ['foo=x=x=x', 'bar=y']) ++ self.assertTrue(str(u) == 'http://localhost/?foo=x%3Dx%3Dx&bar=y') + + class Serialization(TestCase): + +@@ -536,13 +536,13 @@ class Serialization(TestCase): + u = url.URL(scheme, loc, path, query, fragment) + s = flatten(url.URL(scheme, loc, path, query, fragment)) + +- parsedScheme, parsedLoc, parsedPath, parsedQuery, parsedFragment = urlparse.urlsplit(s) ++ parsedScheme, parsedLoc, parsedPath, parsedQuery, parsedFragment = urllib.parse.urlsplit(s) + +- self.assertEquals(scheme, parsedScheme) +- self.assertEquals(loc, parsedLoc) +- self.assertEquals('/' + '/'.join(map(lambda p: urllib.quote(p,safe=''),path)), parsedPath) +- self.assertEquals(query, url.unquerify(parsedQuery)) +- self.assertEquals(fragment, parsedFragment) ++ self.assertEqual(scheme, parsedScheme) ++ self.assertEqual(loc, parsedLoc) ++ self.assertEqual('/' + '/'.join([urllib.parse.quote(p,safe='') for p in path]), parsedPath) ++ self.assertEqual(query, url.unquerify(parsedQuery)) ++ self.assertEqual(fragment, parsedFragment) + + def test_slotQueryParam(self): + original = 'http://foo/bar?baz=bamf' +@@ -553,7 +553,7 @@ class Serialization(TestCase): + ctx.fillSlots('param', 5) + return ctx.tag + +- self.assertEquals(flatten(tags.invisible(render=fillIt)[u]), original + '&toot=5') ++ self.assertEqual(flatten(tags.invisible(render=fillIt)[u]), original + '&toot=5') + + def test_childQueryParam(self): + original = 'http://foo/bar' +@@ -564,7 +564,7 @@ class Serialization(TestCase): + ctx.fillSlots('param', 'baz') + return ctx.tag + +- self.assertEquals(flatten(tags.invisible(render=fillIt)[u]), original + '/baz') ++ self.assertEqual(flatten(tags.invisible(render=fillIt)[u]), original + '/baz') + + def test_strangeSegs(self): + base = 'http://localhost/' +@@ -572,35 +572,35 @@ class Serialization(TestCase): + (r'/foo/', '%2Ffoo%2F'), + (r'c:\foo\bar bar', 
'c%3A%5Cfoo%5Cbar%20bar'), + (r'&<>', '%26%3C%3E'), +- (u'!"\N{POUND SIGN}$%^&*()_+'.encode('utf-8'), '!%22%C2%A3%24%25%5E%26*()_%2B'), ++ ('!"\N{POUND SIGN}$%^&*()_+'.encode('utf-8'), '!%22%C2%A3%24%25%5E%26*()_%2B'), + ) + for test, result in tests: + u = url.URL.fromString(base).child(test) +- self.assertEquals(flatten(u), base+result) ++ self.assertEqual(flatten(u), base+result) + + def test_urlContent(self): + u = url.URL.fromString('http://localhost/').child(r'') +- self.assertEquals(flatten(tags.p[u]), '

http://localhost/%3Cc%3A%5Cfoo%5Cbar%26%3E

') ++ self.assertEqual(flatten(tags.p[u]), '

http://localhost/%3Cc%3A%5Cfoo%5Cbar%26%3E

') + + def test_urlAttr(self): + u = url.URL.fromString('http://localhost/').child(r'') +- self.assertEquals(flatten(tags.img(src=u)), '') ++ self.assertEqual(flatten(tags.img(src=u)), '') + + def test_urlSlot(self): + u = url.URL.fromString('http://localhost/').child(r'') + tag = tags.img(src=tags.slot('src')) + tag.fillSlots('src', u) +- self.assertEquals(flatten(tag), '') ++ self.assertEqual(flatten(tag), '') + + def test_urlXmlAttrSlot(self): + u = url.URL.fromString('http://localhost/').child(r'') + tag = tags.invisible[loaders.xmlstr('')] + tag.fillSlots('src', u) +- self.assertEquals(flatten(tag), '') ++ self.assertEqual(flatten(tag), '') + + def test_safe(self): + u = url.URL.fromString('http://localhost/').child(r"foo-_.!*'()bar") +- self.assertEquals(flatten(tags.p[u]), r"

http://localhost/foo-_.!*'()bar

") ++ self.assertEqual(flatten(tags.p[u]), r"

http://localhost/foo-_.!*'()bar

") + + def test_urlintagwithmultipleamps(self): + """ +@@ -610,11 +610,11 @@ class Serialization(TestCase): + The ampersand must be quoted for the attribute to be valid. + """ + tag = tags.invisible[tags.a(href=url.URL.fromString('http://localhost/').add('foo', 'bar').add('baz', 'spam'))] +- self.assertEquals(flatten(tag), '') ++ self.assertEqual(flatten(tag), '') + + tag = tags.invisible[loaders.xmlstr('')] + tag.fillSlots('href', url.URL.fromString('http://localhost/').add('foo', 'bar').add('baz', 'spam')) +- self.assertEquals(flatten(tag), '') ++ self.assertEqual(flatten(tag), '') + + + def test_rfc1808(self): +@@ -623,7 +623,7 @@ class Serialization(TestCase): + base = url.URL.fromString(rfc1808_relative_link_base) + for link, result in rfc1808_relative_link_tests: + #print link +- self.failUnlessEqual(result, flatten(base.click(link))) ++ self.assertEqual(result, flatten(base.click(link))) + test_rfc1808.todo = 'Many of these fail miserably at the moment; often with a / where there shouldn\'t be' + + +@@ -632,9 +632,9 @@ class Serialization(TestCase): + L{URLSerializer} should provide basic IRI (RFC 3987) support by + encoding Unicode to UTF-8 before percent-encoding. 
+ """ +- iri = u'http://localhost/expos\xe9?doppelg\xe4nger=Bryan O\u2019Sullivan#r\xe9sum\xe9' ++ iri = 'http://localhost/expos\xe9?doppelg\xe4nger=Bryan O\u2019Sullivan#r\xe9sum\xe9' + uri = 'http://localhost/expos%C3%A9?doppelg%C3%A4nger=Bryan%20O%E2%80%99Sullivan#r%C3%A9sum%C3%A9' +- self.assertEquals(flatten(url.URL.fromString(iri)), uri) ++ self.assertEqual(flatten(url.URL.fromString(iri)), uri) + + + +@@ -652,17 +652,19 @@ class RedirectResource(TestCase): + def test_urlRedirect(self): + u = "http://localhost/" + D = self.renderResource(url.URL.fromString(u)) +- def after((html, redirected_to)): ++ def after(xxx_todo_changeme): ++ (html, redirected_to) = xxx_todo_changeme + self.assertIn(u, html) +- self.assertEquals(u, redirected_to) ++ self.assertEqual(u, redirected_to) + return D.addCallback(after) + + + def test_urlRedirectWithParams(self): + D = self.renderResource(url.URL.fromString("http://localhost/").child('child').add('foo', 'bar')) +- def after((html, redirected_to)): ++ def after(xxx_todo_changeme1): ++ (html, redirected_to) = xxx_todo_changeme1 + self.assertIn("http://localhost/child?foo=bar", html) +- self.assertEquals("http://localhost/child?foo=bar", redirected_to) ++ self.assertEqual("http://localhost/child?foo=bar", redirected_to) + return D.addCallback(after) + + +@@ -671,16 +673,18 @@ class RedirectResource(TestCase): + url.URL.fromString("http://localhost/") + .child(util.succeed('child')).add('foo',util.succeed('bar')) + ) +- def after((html, redirected_to)): ++ def after(xxx_todo_changeme2): ++ (html, redirected_to) = xxx_todo_changeme2 + self.assertIn("http://localhost/child?foo=bar", html) +- self.assertEquals("http://localhost/child?foo=bar", redirected_to) ++ self.assertEqual("http://localhost/child?foo=bar", redirected_to) + return D.addCallback(after) + + + def test_deferredURLOverlayParam(self): + D = self.renderResource(url.here.child(util.succeed('child')).add('foo',util.succeed('bar'))) +- def after((html, redirected_to)): ++ 
def after(xxx_todo_changeme3): ++ (html, redirected_to) = xxx_todo_changeme3 + self.assertIn("http://localhost/child?foo=bar", html) +- self.assertEquals("http://localhost/child?foo=bar", redirected_to) ++ self.assertEqual("http://localhost/child?foo=bar", redirected_to) + return D.addCallback(after) + +--- nevow/testutil.py.orig 2016-02-17 12:51:40 UTC ++++ nevow/testutil.py +@@ -118,7 +118,7 @@ class FakeRequest(Componentized): + self.site = FakeSite() + self.requestHeaders = Headers() + if headers: +- for k, v in headers.iteritems(): ++ for k, v in headers.items(): + self.requestHeaders.setRawHeaders(k, [v]) + if cookies is not None: + self.cookies = cookies +@@ -515,7 +515,7 @@ Divmod.UnitTest.runRemote(Divmod.UnitTest.loadFromModu + def run(self, result): + try: + self.checkDependencies() +- except NotSupported, e: ++ except NotSupported as e: + result.startTest(self) + result.addSkip(self, str(e)) + result.stopTest(self) +@@ -596,7 +596,7 @@ class CSSModuleTestMixin: + return fname + + return athena.CSSRegistry( +- {u'TestCSSModuleDependencies': makeModule(), +- u'TestCSSModuleDependencies.Dependor': makeModule( ++ {'TestCSSModuleDependencies': makeModule(), ++ 'TestCSSModuleDependencies.Dependor': makeModule( + '// import TestCSSModuleDependencies.Dependee\n'), +- u'TestCSSModuleDependencies.Dependee': makeModule()}) ++ 'TestCSSModuleDependencies.Dependee': makeModule()}) diff --git a/www/py-notebook/Makefile b/www/py-notebook/Makefile index dfa7bc2f514..1cb84ebb2b9 100644 --- a/www/py-notebook/Makefile +++ b/www/py-notebook/Makefile @@ -1,5 +1,5 @@ PORTNAME= notebook -PORTVERSION= 6.4.6 +PORTVERSION= 6.4.8 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-notebook/distinfo b/www/py-notebook/distinfo index 1aad9d42419..d2f56759ba0 100644 --- a/www/py-notebook/distinfo +++ b/www/py-notebook/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1641046848 -SHA256 (notebook-6.4.6.tar.gz) = 
7bcdf79bd1cda534735bd9830d2cbedab4ee34d8fe1df6e7b946b3aab0902ba3 -SIZE (notebook-6.4.6.tar.gz) = 14342345 +TIMESTAMP = 1647264994 +SHA256 (notebook-6.4.8.tar.gz) = 1e985c9dc6f678bdfffb9dc657306b5469bfa62d73e03f74e8defbf76d284312 +SIZE (notebook-6.4.8.tar.gz) = 14347639 diff --git a/www/py-puppetboard/Makefile b/www/py-puppetboard/Makefile index 3cfc4d34119..adc568dcf8b 100644 --- a/www/py-puppetboard/Makefile +++ b/www/py-puppetboard/Makefile @@ -1,5 +1,5 @@ PORTNAME= puppetboard -PORTVERSION= 3.3.0 +PORTVERSION= 3.4.1 DISTVERSIONPREFIX= v CATEGORIES= www python PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-puppetboard/distinfo b/www/py-puppetboard/distinfo index ca72ed320b6..6f655f6261d 100644 --- a/www/py-puppetboard/distinfo +++ b/www/py-puppetboard/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1638552438 -SHA256 (voxpupuli-puppetboard-v3.3.0_GH0.tar.gz) = 20d132480a6e49d2ca3572e6c65080b60daa12e330dc7d662d4bf438501b1f27 -SIZE (voxpupuli-puppetboard-v3.3.0_GH0.tar.gz) = 3555602 +TIMESTAMP = 1648413757 +SHA256 (voxpupuli-puppetboard-v3.4.1_GH0.tar.gz) = 56561b1e8699d543881a9883f397b722dd4afa4d695551ec160c61506225dfac +SIZE (voxpupuli-puppetboard-v3.4.1_GH0.tar.gz) = 4559085 diff --git a/www/py-py-restclient/files/patch-2to3 b/www/py-py-restclient/files/patch-2to3 new file mode 100644 index 00000000000..0698237eb7a --- /dev/null +++ b/www/py-py-restclient/files/patch-2to3 @@ -0,0 +1,312 @@ +--- restclient/bin/rest_cli.py.orig 2009-08-19 17:00:34 UTC ++++ restclient/bin/rest_cli.py +@@ -17,12 +17,12 @@ + import os + import sys + from optparse import OptionParser, OptionGroup +-import urlparse +-import urllib ++import urllib.parse ++import urllib.request, urllib.parse, urllib.error + + # python 2.6 and above compatibility + try: +- from urlparse import parse_qs as _parse_qs ++ from urllib.parse import parse_qs as _parse_qs + except ImportError: + from cgi import parse_qs as _parse_qs + +@@ -31,7 +31,7 @@ from restclient.transport import useCurl, CurlTranspor + + 
class Url(object): + def __init__(self, string): +- parts = urlparse.urlsplit(urllib.unquote(string)) ++ parts = urllib.parse.urlsplit(urllib.parse.unquote(string)) + if parts[0] != 'http' and parts[0] != 'https': + raise ValueError('Invalid url: %s.' % string) + +@@ -74,8 +74,8 @@ def make_query(string, method='GET', fname=None, + list_headers=None, output=None, proxy=None): + try: + uri = Url(string) +- except ValueError, e: +- print >>sys.stderr, e ++ except ValueError as e: ++ print(e, file=sys.stderr) + return + + transport = None +@@ -84,7 +84,7 @@ def make_query(string, method='GET', fname=None, + try: + proxy_url = Url(proxy) + except: +- print >>sys.stderr, "proxy url is invalid" ++ print("proxy url is invalid", file=sys.stderr) + return + proxy_infos = { "proxy_host": proxy_url.hostname } + if proxy_url.port is not None: +@@ -132,7 +132,7 @@ def make_query(string, method='GET', fname=None, + f.write(data) + f.close() + except: +- print >>sys.stderr, "Can't save result in %s" % output ++ print("Can't save result in %s" % output, file=sys.stderr) + return + + +--- restclient/errors.py.orig 2009-08-19 17:00:34 UTC ++++ restclient/errors.py +@@ -38,7 +38,7 @@ class ResourceError(Exception): + return self.msg + try: + return self._fmt % self.__dict__ +- except (NameError, ValueError, KeyError), e: ++ except (NameError, ValueError, KeyError) as e: + return 'Unprintable exception %s: %s' \ + % (self.__class__.__name__, str(e)) + +--- restclient/rest.py.orig 2009-08-23 09:33:14 UTC ++++ restclient/rest.py +@@ -57,9 +57,9 @@ This module provide a common interface for all HTTP eq + import cgi + import mimetypes + import os +-import StringIO ++import io + import types +-import urllib ++import urllib.request, urllib.parse, urllib.error + + try: + import chardet +@@ -337,13 +337,13 @@ class RestClient(object): + except IOError: + pass + size = int(os.fstat(body.fileno())[6]) +- elif isinstance(body, types.StringTypes): ++ elif isinstance(body, (str,)): + size = 
len(body) + body = to_bytestring(body) + elif isinstance(body, dict): + _headers.setdefault('Content-Type', "application/x-www-form-urlencoded; charset=utf-8") + body = form_encode(body) +- print body ++ print(body) + size = len(body) + else: + raise RequestError('Unable to calculate ' +@@ -360,7 +360,7 @@ class RestClient(object): + try: + resp, data = self.transport.request(self.make_uri(uri, path, **params), + method=method, body=body, headers=_headers) +- except TransportError, e: ++ except TransportError as e: + raise RequestError(str(e)) + + self.status = status_code = resp.status +@@ -444,7 +444,7 @@ class RestClient(object): + _path = [] + trailing_slash = False + for s in path: +- if s is not None and isinstance(s, basestring): ++ if s is not None and isinstance(s, str): + if len(s) > 1 and s.endswith('/'): + trailing_slash = True + else: +@@ -463,7 +463,7 @@ class RestClient(object): + retval.append(path_str) + + params = [] +- for k, v in query.items(): ++ for k, v in list(query.items()): + if type(v) in (list, tuple): + params.extend([(k, i) for i in v if i is not None]) + elif v is not None: +@@ -478,16 +478,16 @@ class RestClient(object): + + def url_quote(s, charset='utf-8', safe='/:'): + """URL encode a single string with a given encoding.""" +- if isinstance(s, unicode): ++ if isinstance(s, str): + s = s.encode(charset) + elif not isinstance(s, str): + s = str(s) +- return urllib.quote(s, safe=safe) ++ return urllib.parse.quote(s, safe=safe) + + def url_encode(obj, charset="utf8", encode_keys=False): + if isinstance(obj, dict): + items = [] +- for k, v in obj.iteritems(): ++ for k, v in obj.items(): + if not isinstance(v, (tuple, list)): + v = [v] + items.append((k, v)) +@@ -496,7 +496,7 @@ def url_encode(obj, charset="utf8", encode_keys=False) + + tmp = [] + for key, values in items: +- if encode_keys and isinstance(key, unicode): ++ if encode_keys and isinstance(key, str): + key = key.encode(charset) + else: + key = str(key) +@@ -504,18 +504,18 
@@ def url_encode(obj, charset="utf8", encode_keys=False) + for value in values: + if value is None: + continue +- elif isinstance(value, unicode): ++ elif isinstance(value, str): + value = value.encode(charset) + else: + value = str(value) +- tmp.append('%s=%s' % (urllib.quote(key), +- urllib.quote_plus(value))) ++ tmp.append('%s=%s' % (urllib.parse.quote(key), ++ urllib.parse.quote_plus(value))) + + return '&'.join(tmp) + + def form_encode(obj, charser="utf8"): + tmp = [] +- for key, value in obj.items(): ++ for key, value in list(obj.items()): + tmp.append("%s=%s" % (url_quote(key), + url_quote(value))) + return to_bytestring("&".join(tmp)) +@@ -596,39 +596,39 @@ def _getCharacterEncoding(http_headers, xml_data): + elif xml_data[:4] == '\x00\x3c\x00\x3f': + # UTF-16BE + sniffed_xml_encoding = 'utf-16be' +- xml_data = unicode(xml_data, 'utf-16be').encode('utf-8') ++ xml_data = str(xml_data, 'utf-16be').encode('utf-8') + elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') and (xml_data[2:4] != '\x00\x00'): + # UTF-16BE with BOM + sniffed_xml_encoding = 'utf-16be' +- xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8') ++ xml_data = str(xml_data[2:], 'utf-16be').encode('utf-8') + elif xml_data[:4] == '\x3c\x00\x3f\x00': + # UTF-16LE + sniffed_xml_encoding = 'utf-16le' +- xml_data = unicode(xml_data, 'utf-16le').encode('utf-8') ++ xml_data = str(xml_data, 'utf-16le').encode('utf-8') + elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and (xml_data[2:4] != '\x00\x00'): + # UTF-16LE with BOM + sniffed_xml_encoding = 'utf-16le' +- xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8') ++ xml_data = str(xml_data[2:], 'utf-16le').encode('utf-8') + elif xml_data[:4] == '\x00\x00\x00\x3c': + # UTF-32BE + sniffed_xml_encoding = 'utf-32be' +- xml_data = unicode(xml_data, 'utf-32be').encode('utf-8') ++ xml_data = str(xml_data, 'utf-32be').encode('utf-8') + elif xml_data[:4] == '\x3c\x00\x00\x00': + # UTF-32LE + sniffed_xml_encoding = 'utf-32le' 
+- xml_data = unicode(xml_data, 'utf-32le').encode('utf-8') ++ xml_data = str(xml_data, 'utf-32le').encode('utf-8') + elif xml_data[:4] == '\x00\x00\xfe\xff': + # UTF-32BE with BOM + sniffed_xml_encoding = 'utf-32be' +- xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8') ++ xml_data = str(xml_data[4:], 'utf-32be').encode('utf-8') + elif xml_data[:4] == '\xff\xfe\x00\x00': + # UTF-32LE with BOM + sniffed_xml_encoding = 'utf-32le' +- xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8') ++ xml_data = str(xml_data[4:], 'utf-32le').encode('utf-8') + elif xml_data[:3] == '\xef\xbb\xbf': + # UTF-8 with BOM + sniffed_xml_encoding = 'utf-8' +- xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8') ++ xml_data = str(xml_data[3:], 'utf-8').encode('utf-8') + else: + # ASCII-compatible + pass +@@ -652,7 +652,7 @@ def _getCharacterEncoding(http_headers, xml_data): + true_encoding = http_encoding or 'us-ascii' + elif http_content_type.startswith('text/'): + true_encoding = http_encoding or 'us-ascii' +- elif http_headers and (not http_headers.has_key('content-type')): ++ elif http_headers and ('content-type' not in http_headers): + true_encoding = xml_encoding or 'iso-8859-1' + else: + true_encoding = xml_encoding or 'utf-8' +--- restclient/transport/_curl.py.orig 2009-08-19 17:00:34 UTC ++++ restclient/transport/_curl.py +@@ -20,7 +20,7 @@ curl transport + """ + + import re +-import StringIO ++import io + import sys + + +@@ -36,7 +36,7 @@ except ImportError: + + NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') + def _normalize_headers(headers): +- return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) ++ return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.items()]) + + + def _get_pycurl_errcode(symbol, default): +@@ -164,8 +164,8 @@ class CurlTransport(HTTPTransportBase): + else: # no timeout by default + c.setopt(pycurl.TIMEOUT, 0) + +- data = 
StringIO.StringIO() +- header = StringIO.StringIO() ++ data = io.StringIO() ++ header = io.StringIO() + c.setopt(pycurl.WRITEFUNCTION, data.write) + c.setopt(pycurl.HEADERFUNCTION, header.write) + c.setopt(pycurl.URL, url) +@@ -226,7 +226,7 @@ class CurlTransport(HTTPTransportBase): + content = body + else: + body = to_bytestring(body) +- content = StringIO.StringIO(body) ++ content = io.StringIO(body) + if 'Content-Length' in headers: + del headers['Content-Length'] + content_length = len(body) +@@ -240,14 +240,14 @@ class CurlTransport(HTTPTransportBase): + if headers: + _normalize_headers(headers) + c.setopt(pycurl.HTTPHEADER, +- ["%s: %s" % pair for pair in sorted(headers.iteritems())]) ++ ["%s: %s" % pair for pair in sorted(headers.items())]) + + try: + c.perform() +- except pycurl.error, e: ++ except pycurl.error as e: + if e[0] != CURLE_SEND_ERROR: + if restclient.debuglevel > 0: +- print >>sys.stderr, str(e) ++ print(str(e), file=sys.stderr) + raise TransportError(e) + + response_headers = self._parseHeaders(header) +--- restclient/transport/base.py.orig 2009-08-19 17:00:34 UTC ++++ restclient/transport/base.py +@@ -46,7 +46,7 @@ class HTTPResponse(dict): + reason = "Ok" + + def __init__(self, info): +- for key, value in info.iteritems(): ++ for key, value in info.items(): + self[key] = value + self.status = int(self.get('status', self.status)) + self.final_url = self.get('final_url', self.final_url) +@@ -55,7 +55,7 @@ class HTTPResponse(dict): + if name == 'dict': + return self + else: +- raise AttributeError, name ++ raise AttributeError(name) + + def __repr__(self): + return "<%s status %s for %s>" % (self.__class__.__name__, diff --git a/www/py-pysmartdl/Makefile b/www/py-pysmartdl/Makefile index 168e45a83d5..d2f7986358b 100644 --- a/www/py-pysmartdl/Makefile +++ b/www/py-pysmartdl/Makefile @@ -2,6 +2,7 @@ PORTNAME= pysmartdl PORTVERSION= 1.3.4 +PORTREVISION= 1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff 
--git a/www/py-pysmartdl/files/patch-setup.py b/www/py-pysmartdl/files/patch-setup.py new file mode 100644 index 00000000000..b91914dc670 --- /dev/null +++ b/www/py-pysmartdl/files/patch-setup.py @@ -0,0 +1,11 @@ +--- setup.py.orig 2020-09-19 15:39:42 UTC ++++ setup.py +@@ -11,7 +11,7 @@ setup( + author='Itay Brandes', + author_email='brandes.itay+pysmartdl@gmail.com', + license='Public Domain', +- packages=find_packages(), ++ packages=find_packages(exclude=['test*']), + description='A Smart Download Manager for Python', + long_description=open('README.md').read(), + test_suite = "test.test_pySmartDL.test_suite", diff --git a/www/py-quilt3/Makefile b/www/py-quilt3/Makefile index 6720ec318b1..55814f3d3a1 100644 --- a/www/py-quilt3/Makefile +++ b/www/py-quilt3/Makefile @@ -2,7 +2,7 @@ PORTNAME= quilt3 PORTVERSION= 3.6.0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} diff --git a/www/py-quilt3/files/patch-setup.py b/www/py-quilt3/files/patch-setup.py index 8e83be53a15..9758b87a25d 100644 --- a/www/py-quilt3/files/patch-setup.py +++ b/www/py-quilt3/files/patch-setup.py @@ -1,19 +1,11 @@ --- setup.py.orig 2021-10-15 18:41:22 UTC +++ setup.py -@@ -64,13 +64,13 @@ setup( - 'flask', - 'flask_cors', - 'flask_json', -- 'jsonlines==1.2.0', -+ 'jsonlines>=1.2.0', - 'PyYAML>=5.1', - 'requests>=2.12.4', - 'tenacity>=5.1.1', - 'tqdm>=4.32', -- 'requests_futures==1.0.0', -- 'jsonschema==3.*', -+ 'requests_futures>=1.0.0', -+ 'jsonschema>=3,<5', - ], - extras_require={ - 'pyarrow': [ +@@ -38,7 +38,7 @@ class VerifyVersionCommand(install): + setup( + name="quilt3", + version=VERSION, +- packages=find_packages(), ++ packages=find_packages(exclude=['tests*']), + description='Quilt: where data comes together', + long_description=readme(), + python_requires='>=3.6', diff --git a/www/py-requests-oauthlib/Makefile b/www/py-requests-oauthlib/Makefile index 2ff98acd5a6..53e2353f552 100644 --- 
a/www/py-requests-oauthlib/Makefile +++ b/www/py-requests-oauthlib/Makefile @@ -1,7 +1,7 @@ # Created by: Kubilay Kocak PORTNAME= requests-oauthlib -PORTVERSION= 0.6.2 +PORTVERSION= 1.3.1 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -10,17 +10,19 @@ MAINTAINER= sunpoet@FreeBSD.org COMMENT= OAuthlib authentication support for Requests LICENSE= ISCL +LICENSE_FILE= ${WRKSRC}/LICENSE -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}oauthlib>=0.6.2:security/py-oauthlib@${PY_FLAVOR} \ +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}oauthlib>=3.0.0:security/py-oauthlib@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}requests>=2.0.0:www/py-requests@${PY_FLAVOR} -TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}mock>0:devel/py-mock@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}mock>=0:devel/py-mock@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}requests-mock>=0:www/py-requests-mock@${PY_FLAVOR} USES= python:3.6+ -USE_PYTHON= autoplist distutils +USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes do-test: - @cd ${WRKSRC} && ${PYTHON_CMD} ${PYDISTUTILS_SETUP} test + cd ${WRKSRC} && ${PYTHON_CMD} -m unittest -v .include diff --git a/www/py-requests-oauthlib/distinfo b/www/py-requests-oauthlib/distinfo index e73f089cc27..52bf7bce73c 100644 --- a/www/py-requests-oauthlib/distinfo +++ b/www/py-requests-oauthlib/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1468874436 -SHA256 (requests-oauthlib-0.6.2.tar.gz) = 161ec8aaa360befac7079bcf20dc2a3993d1ddef19bc21d8118232a98f716e7a -SIZE (requests-oauthlib-0.6.2.tar.gz) = 76972 +TIMESTAMP = 1647264714 +SHA256 (requests-oauthlib-1.3.1.tar.gz) = 75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a +SIZE (requests-oauthlib-1.3.1.tar.gz) = 52027 diff --git a/www/py-requests-oauthlib/pkg-descr b/www/py-requests-oauthlib/pkg-descr index 6b66c574163..46a6d9deb48 100644 --- a/www/py-requests-oauthlib/pkg-descr +++ b/www/py-requests-oauthlib/pkg-descr @@ -1,6 +1,6 @@ This project provides first-class OAuth library support for Requests. 
-OAuth can seem overly complicated and it sure has its quirks. Luckily, +OAuth 1 can seem overly complicated and it sure has its quirks. Luckily, requests_oauthlib hides most of these and let you focus at the task at hand. WWW: https://github.com/requests/requests-oauthlib diff --git a/www/py-restclient/files/patch-2to3 b/www/py-restclient/files/patch-2to3 new file mode 100644 index 00000000000..87ce7330c12 --- /dev/null +++ b/www/py-restclient/files/patch-2to3 @@ -0,0 +1,377 @@ +--- restclient/__init__.py.orig 2012-11-17 20:17:26 UTC ++++ restclient/__init__.py +@@ -84,10 +84,10 @@ CHANGESET: + + import httplib2 + import mimetypes +-import thread ++import _thread + import types +-import urllib +-import urllib2 ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse + try: + import json + except ImportError: +@@ -168,7 +168,7 @@ def get_content_type(filename): + return mimetypes.guess_type(filename)[0] or 'application/octet-stream' + + +-def GET(url, params=None, files=None, accept=[], headers=None, async=False, ++def GET(url, params=None, files=None, accept=[], headers=None, asynchronous=False, + resp=False, credentials=None, httplib_params=None): + """ make an HTTP GET request. + +@@ -178,20 +178,20 @@ def GET(url, params=None, files=None, accept=[], heade + in addition, parameters and headers can be specified (as dicts). a + list of mimetypes to accept may be specified. + +- if async=True is passed in, it will perform the request in a new ++ if asynchronous=True is passed in, it will perform the request in a new + thread and immediately return nothing. + + if resp=True is passed in, it will return a tuple of an httplib2 + response object and the content instead of just the content. 
+ """ +- return rest_invoke(url=url, method=u"GET", params=params, ++ return rest_invoke(url=url, method="GET", params=params, + files=files, accept=accept, headers=headers, +- async=async, resp=resp, credentials=credentials, ++ asynchronous=async, resp=resp, credentials=credentials, + httplib_params=httplib_params) + + + def POST(url, params=None, files=None, accept=[], headers=None, +- async=True, resp=False, credentials=None, httplib_params=None): ++ asynchronous=True, resp=False, credentials=None, httplib_params=None): + """ make an HTTP POST request. + + performs a POST request to the specified URL. +@@ -209,19 +209,19 @@ def POST(url, params=None, files=None, accept=[], head + (nothing) immediately. + + To wait for the response and have it return the body of the +- response, specify async=False. ++ response, specify asynchronous=False. + + if resp=True is passed in, it will return a tuple of an httplib2 + response object and the content instead of just the content. + """ +- return rest_invoke(url=url, method=u"POST", params=params, ++ return rest_invoke(url=url, method="POST", params=params, + files=files, accept=accept, headers=headers, +- async=async, resp=resp, credentials=credentials, ++ asynchronous=async, resp=resp, credentials=credentials, + httplib_params=httplib_params) + + + def PUT(url, params=None, files=None, accept=[], headers=None, +- async=True, resp=False, credentials=None, httplib_params=None): ++ asynchronous=True, resp=False, credentials=None, httplib_params=None): + """ make an HTTP PUT request. + + performs a PUT request to the specified URL. +@@ -239,20 +239,20 @@ def PUT(url, params=None, files=None, accept=[], heade + (nothing) immediately. + + To wait for the response and have it return the body of the +- response, specify async=False. ++ response, specify asynchronous=False. + + if resp=True is passed in, it will return a tuple of an httplib2 + response object and the content instead of just the content. 
+ """ + +- return rest_invoke(url=url, method=u"PUT", params=params, ++ return rest_invoke(url=url, method="PUT", params=params, + files=files, accept=accept, headers=headers, +- async=async, resp=resp, credentials=credentials, ++ asynchronous=async, resp=resp, credentials=credentials, + httplib_params=httplib_params) + + + def DELETE(url, params=None, files=None, accept=[], headers=None, +- async=True, resp=False, credentials=None, ++ asynchronous=True, resp=False, credentials=None, + httplib_params=None): + """ make an HTTP DELETE request. + +@@ -265,20 +265,20 @@ def DELETE(url, params=None, files=None, accept=[], he + returns (nothing) immediately. + + To wait for the response and have it return the body of the +- response, specify async=False. ++ response, specify asynchronous=False. + + if resp=True is passed in, it will return a tuple of an httplib2 + response object and the content instead of just the content. + """ + +- return rest_invoke(url=url, method=u"DELETE", params=params, ++ return rest_invoke(url=url, method="DELETE", params=params, + files=files, accept=accept, headers=headers, +- async=async, resp=resp, credentials=credentials, ++ asynchronous=async, resp=resp, credentials=credentials, + httplib_params=httplib_params) + + +-def rest_invoke(url, method=u"GET", params=None, files=None, +- accept=[], headers=None, async=False, resp=False, ++def rest_invoke(url, method="GET", params=None, files=None, ++ accept=[], headers=None, asynchronous=False, resp=False, + httpcallback=None, credentials=None, + httplib_params=None): + """ make an HTTP request with all the trimmings. +@@ -315,7 +315,7 @@ def rest_invoke(url, method=u"GET", params=None, files + accept: list of mimetypes to accept in order of + preference. defaults to '*/*' + headers: dictionary of additional headers to send to the server +- async: Boolean. if true, does request in new thread and nothing is ++ asynchronous: Boolean. 
if true, does request in new thread and nothing is + returned + resp: Boolean. if true, returns a tuple of response, + content. otherwise returns just content +@@ -325,8 +325,8 @@ def rest_invoke(url, method=u"GET", params=None, files + httplib_params: dict of parameters supplied to httplib2 - for + example ca_certs='/etc/ssl/certs/ca-certificates.crt' + """ +- if async: +- thread.start_new_thread(_rest_invoke, ++ if asynchronous: ++ _thread.start_new_thread(_rest_invoke, + (url, method, params, files, accept, + headers, resp, httpcallback, credentials, + httplib_params)) +@@ -335,7 +335,7 @@ def rest_invoke(url, method=u"GET", params=None, files + resp, httpcallback, credentials, httplib_params) + + +-def _rest_invoke(url, method=u"GET", params=None, files=None, accept=None, ++def _rest_invoke(url, method="GET", params=None, files=None, accept=None, + headers=None, resp=False, httpcallback=None, + credentials=None, httplib_params=None): + if params is None: +@@ -363,7 +363,7 @@ def _rest_invoke(url, method=u"GET", params=None, file + headers[k] = v + + if httpcallback.username or httpcallback.password: +- print "warning: restclient can't handle HTTP auth yet" ++ print("warning: restclient can't handle HTTP auth yet") + if httpcallback.redirections != 5: + print ("warning: restclient doesn't support " + "HTTPCallback's restrictions yet") +@@ -371,18 +371,18 @@ def _rest_invoke(url, method=u"GET", params=None, file + print ("warning: restclient doesn't support " + "HTTPCallback's follow_all_redirects_yet") + if httpcallback.body != "": +- print "warning: restclient doesn't support HTTPCallback's body yet" ++ print("warning: restclient doesn't support HTTPCallback's body yet") + + headers = add_accepts(accept, headers) + if method in ['POST', 'PUT'] and 'Content-Type' not in headers: + headers['Content-Type'] = 'application/x-www-form-urlencoded' +- params = urllib.urlencode(fix_params(params)) ++ params = urllib.parse.urlencode(fix_params(params)) + elif (method in 
['POST', 'PUT'] and + headers['Content-Type'] == 'application/json'): + params = json.dumps(params) + else: + # GET and DELETE requests +- params = urllib.urlencode(fix_params(params)) ++ params = urllib.parse.urlencode(fix_params(params)) + + if files: + return post_multipart(extract_host(url), extract_path(url), +@@ -463,11 +463,11 @@ def my_urlparse(url): + + + def unpack_params(params): +- return [(k, params[k]) for k in params.keys()] ++ return [(k, params[k]) for k in list(params.keys())] + + + def unpack_files(files): +- return [(k, files[k]['filename'], files[k]['file']) for k in files.keys()] ++ return [(k, files[k]['filename'], files[k]['file']) for k in list(files.keys())] + + + def add_accepts(accept=None, headers=None): +@@ -485,8 +485,8 @@ def add_accepts(accept=None, headers=None): + def fix_params(params=None): + if params is None: + params = {} +- for k in params.keys(): +- if type(k) not in types.StringTypes: ++ for k in list(params.keys()): ++ if type(k) not in (str,): + new_k = str(k) + params[new_k] = params[k] + del params[k] +@@ -500,8 +500,8 @@ def fix_params(params=None): + except UnicodeDecodeError: + pass + +- for k in params.keys(): +- if type(params[k]) not in types.StringTypes: ++ for k in list(params.keys()): ++ if type(params[k]) not in (str,): + params[k] = str(params[k]) + try: + params[k].encode('ascii') +@@ -517,12 +517,12 @@ def fix_params(params=None): + def fix_headers(headers=None): + if headers is None: + headers = {} +- for k in headers.keys(): +- if type(k) not in types.StringTypes: ++ for k in list(headers.keys()): ++ if type(k) not in (str,): + new_k = str(k) + headers[new_k] = headers[k] + del headers[k] +- if type(headers[k]) not in types.StringTypes: ++ if type(headers[k]) not in (str,): + headers[k] = str(headers[k]) + try: + headers[k].encode('ascii') +@@ -539,8 +539,8 @@ def fix_files(files=None): + if files is None: + files = {} + # fix keys in files +- for k in files.keys(): +- if type(k) not in 
types.StringTypes: ++ for k in list(files.keys()): ++ if type(k) not in (str,): + new_k = str(k) + files[new_k] = files[k] + del files[k] +@@ -551,7 +551,7 @@ def fix_files(files=None): + files[new_k] = files[k] + del files[k] + # second pass to fix filenames +- for k in files.keys(): ++ for k in list(files.keys()): + try: + files[k]['filename'].encode('ascii') + except UnicodeEncodeError: +@@ -560,15 +560,15 @@ def fix_files(files=None): + + + if __name__ == "__main__": +- print rest_invoke("http://localhost:9090/", ++ print(rest_invoke("http://localhost:9090/", + method="POST", params={'value': 'store this'}, +- accept=["text/plain", "text/html"], async=False) ++ accept=["text/plain", "text/html"], asynchronous=False)) + image = open('sample.jpg').read() + r = rest_invoke("http://resizer.ccnmtl.columbia.edu/resize", + method="POST", + files={'image': {'file': image, + 'filename': 'sample.jpg'}}, +- async=False) ++ asynchronous=False) + out = open("thumb.jpg", "w") + out.write(r) + out.close() +@@ -576,13 +576,13 @@ if __name__ == "__main__": + r = POST("http://resizer.ccnmtl.columbia.edu/resize", + files={'image': {'file': image, + 'filename': 'sample.jpg'}}, +- async=False) ++ asynchronous=False) + # evil unicode tests +- print rest_invoke(u"http://localhost:9090/foo/", +- params={u'foo\u2012': u'\u2012'}, +- headers={u"foo\u2012": u"foo\u2012"}) ++ print(rest_invoke("http://localhost:9090/foo/", ++ params={'foo\u2012': '\u2012'}, ++ headers={"foo\u2012": "foo\u2012"})) + +- r = rest_invoke(u"http://localhost:9090/resize", method="POST", +- files={u'image\u2012': {'file': image, +- 'filename': u'samp\u2012le.jpg'}}, +- async=False) ++ r = rest_invoke("http://localhost:9090/resize", method="POST", ++ files={'image\u2012': {'file': image, ++ 'filename': 'samp\u2012le.jpg'}}, ++ asynchronous=False) +--- restclient/test/test_everything.py.orig 2012-11-17 20:17:26 UTC ++++ restclient/test/test_everything.py +@@ -39,7 +39,7 @@ on each request it handles, please submit 
a patch. + + from restclient import * + import threading, os +-import BaseHTTPServer ++import http.server + import cgi + + port_num = int(os.environ.get('RESTCLIENT_TEST_PORT',11123)) +@@ -47,12 +47,12 @@ hostname = "http://localhost:%d/" % port_num + image = open('sample.jpg').read() + + def start_server(callback): +- class LoopbackHandler(BaseHTTPServer.BaseHTTPRequestHandler): ++ class LoopbackHandler(http.server.BaseHTTPRequestHandler): + """ a simple http server that will basically echo back the request + that was made to it """ + def respond(self): +- s = self.requestline + u"\n" \ +- + str(self.headers) + u"\n\n" \ ++ s = self.requestline + "\n" \ ++ + str(self.headers) + "\n\n" \ + + self.body() + + response = s.encode('utf-8') +@@ -82,7 +82,7 @@ def start_server(callback): + + def run(): + """ start the server for a single request """ +- server_class=BaseHTTPServer.HTTPServer ++ server_class=http.server.HTTPServer + handler_class=LoopbackHandler + server_address = ('', port_num) + httpd = server_class(server_address, handler_class) +@@ -116,7 +116,7 @@ def test_get(): + @servify + def test_post(): + expected = "POST\nvalue: store this\nDONE\n" +- r = POST(hostname, params={'value' : 'store this'}, accept=["text/plain","text/html"], async=False) ++ r = POST(hostname, params={'value' : 'store this'}, accept=["text/plain","text/html"], asynchronous=False) + assert r.startswith('POST /') + assert "value=store+this" in r + assert "accept: text/plain,text/html" in r +@@ -124,25 +124,25 @@ def test_post(): + @servify + def test_post_image(): + result = POST(hostname + "resize", files={'image' : {'file' : image, 'filename' : 'sample.jpg'}}, +- async=False) ++ asynchronous=False) + assert result.startswith('POST /resize') + assert "multipart" in result + + @servify + def test_get_unicode(): +- expected = u"GET\nfoo\u2012: \u2012\nDONE\n".encode('utf-8') +- r = GET(unicode(hostname + "foo/"),params={u'foo\u2012' : u'\u2012'}, +- headers={u"foo\u2012" : 
u"foo\u2012"}) ++ expected = "GET\nfoo\u2012: \u2012\nDONE\n".encode('utf-8') ++ r = GET(str(hostname + "foo/"),params={'foo\u2012' : '\u2012'}, ++ headers={"foo\u2012" : "foo\u2012"}) + # unicode in params gets urlencoded + assert r.startswith('GET /foo/?foo%E2%80%92=%E2%80%92') + # unicode in headers gets stripped out. they can only contain ascii. +- assert u"foo: foo" in r ++ assert "foo: foo" in r + + @servify + def test_post_unicode(): +- result = POST(unicode(hostname + "foo/"), +- params={u'foo\u2012' : u'\u2012'}, +- async=False) ++ result = POST(str(hostname + "foo/"), ++ params={'foo\u2012' : '\u2012'}, ++ asynchronous=False) + assert result.startswith('POST /foo/') + expected = "foo%E2%80%92=%E2%80%92" # urlencoded + assert expected in result diff --git a/www/py-sentinelhub/Makefile b/www/py-sentinelhub/Makefile index 32cd539a713..b019fba09d3 100644 --- a/www/py-sentinelhub/Makefile +++ b/www/py-sentinelhub/Makefile @@ -19,7 +19,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}aenum>=2.1.4:devel/py-aenum@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}dataclasses-json>=0:devel/py-dataclasses-json@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}numpy>=0,1:math/py-numpy@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}oauthlib>=0:security/py-oauthlib@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}pillow>=0:graphics/py-pillow@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pillow>=8.3.2<8.4.1:graphics/py-pillow@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}pyproj>=2.2.0:graphics/py-pyproj@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}dateutil>=0:devel/py-dateutil@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}requests>=2.5.0:www/py-requests@${PY_FLAVOR} \ diff --git a/www/py-sentinelhub/files/patch-requirements.txt b/www/py-sentinelhub/files/patch-requirements.txt deleted file mode 100644 index 1bd8aa7718b..00000000000 --- a/www/py-sentinelhub/files/patch-requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ ---- requirements.txt.orig 2022-01-05 07:25:52 UTC -+++ requirements.txt -@@ -3,7 +3,7 @@ requests>=2.5.0 - click - numpy - tifffile 
--pillow>=8.3.2,<=8.4.0 -+pillow - python-dateutil - utm - shapely diff --git a/www/py-starlette/Makefile b/www/py-starlette/Makefile index 19e1a9095a6..5a2a0c72199 100644 --- a/www/py-starlette/Makefile +++ b/www/py-starlette/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= starlette -PORTVERSION= 0.18.0 +PORTVERSION= 0.19.0 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,7 +12,7 @@ COMMENT= Lightweight ASGI framework/toolkit LICENSE= BSD3CLAUSE LICENSE_FILE= ${WRKSRC}/LICENSE.md -RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}anyio>=3.0.0<4:devel/py-anyio@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}anyio>=3.4.0<5:devel/py-anyio@${PY_FLAVOR} USES= python:3.7+ USE_PYTHON= autoplist concurrent distutils @@ -32,7 +32,7 @@ FULL_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}itsdangerous>=0:security/py-itsdangerou .include .if ${PYTHON_REL} < 31000 -RUN_DEPENDS+= ${PYTHON_PKGNAMEPREFIX}typing-extensions>=0:devel/py-typing-extensions@${PY_FLAVOR} +RUN_DEPENDS+= ${PYTHON_PKGNAMEPREFIX}typing-extensions>=3.10.0:devel/py-typing-extensions@${PY_FLAVOR} .endif .include diff --git a/www/py-starlette/distinfo b/www/py-starlette/distinfo index 2eee5168eec..d8112de673a 100644 --- a/www/py-starlette/distinfo +++ b/www/py-starlette/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1643133815 -SHA256 (starlette-0.18.0.tar.gz) = b45c6e9a617ecb5caf7e6446bd8d767b0084d6217e8e1b08187ca5191e10f097 -SIZE (starlette-0.18.0.tar.gz) = 49513 +TIMESTAMP = 1647264716 +SHA256 (starlette-0.19.0.tar.gz) = 4a1a92aa89dbacc3a4c694a2c112863e88449730ff99b421a9b71fb2213bcd9c +SIZE (starlette-0.19.0.tar.gz) = 49989 diff --git a/www/py-tornado4/files/patch-tornado-test-asyncio_test.py b/www/py-tornado4/files/patch-tornado-test-asyncio_test.py new file mode 100644 index 00000000000..d59c0a88b75 --- /dev/null +++ b/www/py-tornado4/files/patch-tornado-test-asyncio_test.py @@ -0,0 +1,12 @@ +--- tornado/test/asyncio_test.py.orig 2018-01-05 03:07:44 UTC ++++ 
tornado/test/asyncio_test.py +@@ -46,7 +46,8 @@ class AsyncIOLoopTest(AsyncTestCase): + if hasattr(asyncio, 'ensure_future'): + ensure_future = asyncio.ensure_future + else: +- ensure_future = asyncio.async ++ # async is a reserved word in Python 3.7 ++ ensure_future = getattr(asyncio, "async") + + x = yield ensure_future( + asyncio.get_event_loop().run_in_executor(None, lambda: 42)) diff --git a/www/py-uvicorn/Makefile b/www/py-uvicorn/Makefile index e3c00be72e2..068ad29c725 100644 --- a/www/py-uvicorn/Makefile +++ b/www/py-uvicorn/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= uvicorn -PORTVERSION= 0.17.5 +PORTVERSION= 0.17.6 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -15,7 +15,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE.md RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}asgiref>=3.4.0:www/py-asgiref@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}click>=7.0:devel/py-click@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}h11>=0.8:net/py-h11@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}httptools>=0.2.0<0.4.1:www/py-httptools@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}httptools>=0.4.0:www/py-httptools@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}python-dotenv>=0.13:www/py-python-dotenv@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}yaml>=5.1:devel/py-yaml@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}uvloop>=0.14.0:devel/py-uvloop@${PY_FLAVOR} \ diff --git a/www/py-uvicorn/distinfo b/www/py-uvicorn/distinfo index 7ff2938702b..e4f4235df73 100644 --- a/www/py-uvicorn/distinfo +++ b/www/py-uvicorn/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058172 -SHA256 (uvicorn-0.17.5.tar.gz) = c04a9c069111489c324f427501b3840d306c6b91a77b00affc136a840a3f45f1 -SIZE (uvicorn-0.17.5.tar.gz) = 39547 +TIMESTAMP = 1647264718 +SHA256 (uvicorn-0.17.6.tar.gz) = 5180f9d059611747d841a4a4c4ab675edf54c8489e97f96d0583ee90ac3bfc23 +SIZE (uvicorn-0.17.6.tar.gz) = 39534 diff --git a/www/py-uvicorn/files/patch-setup.py b/www/py-uvicorn/files/patch-setup.py deleted file mode 100644 index 
cea05aa656b..00000000000 --- a/www/py-uvicorn/files/patch-setup.py +++ /dev/null @@ -1,11 +0,0 @@ ---- setup.py.orig 2022-02-16 12:54:46 UTC -+++ setup.py -@@ -53,7 +53,7 @@ minimal_requirements = [ - - extra_requirements = [ - "websockets>=10.0", -- "httptools>=0.2.0,<0.4.0", -+ "httptools>=0.2.0,<0.4.1", - "uvloop>=0.14.0,!=0.15.0,!=0.15.1; " + env_marker_cpython, - "colorama>=0.4;" + env_marker_win, - "watchgod>=0.6", diff --git a/www/py-waitress/Makefile b/www/py-waitress/Makefile index bc92a4b8492..9b9707bfbd2 100644 --- a/www/py-waitress/Makefile +++ b/www/py-waitress/Makefile @@ -1,7 +1,7 @@ # Created by: Olivier Duchateau PORTNAME= waitress -PORTVERSION= 2.0.0 +PORTVERSION= 2.1.0 CATEGORIES= www python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -12,7 +12,7 @@ COMMENT= Python WSGI server LICENSE= ZPL21 LICENSE_FILE= ${WRKSRC}/LICENSE.txt -USES= cpe python:3.6+ +USES= cpe python:3.7+ USE_PYTHON= autoplist concurrent distutils NO_ARCH= yes diff --git a/www/py-waitress/distinfo b/www/py-waitress/distinfo index 731a6f4630b..d963be5ac47 100644 --- a/www/py-waitress/distinfo +++ b/www/py-waitress/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1618320316 -SHA256 (waitress-2.0.0.tar.gz) = 69e1f242c7f80273490d3403c3976f3ac3b26e289856936d1f620ed48f321897 -SIZE (waitress-2.0.0.tar.gz) = 175641 +TIMESTAMP = 1647264720 +SHA256 (waitress-2.1.0.tar.gz) = ec8a8d9b6b15f3bb2c1a82b8f3929a029c333c35fcafb08c185a9e562d8cc9c2 +SIZE (waitress-2.1.0.tar.gz) = 176665 diff --git a/www/py-webunit/files/patch-2to3 b/www/py-webunit/files/patch-2to3 new file mode 100644 index 00000000000..35fd5080f11 --- /dev/null +++ b/www/py-webunit/files/patch-2to3 @@ -0,0 +1,548 @@ +--- webunit/cookie.py.orig 2010-05-05 05:21:19 UTC ++++ webunit/cookie.py +@@ -1,4 +1,4 @@ +-import re, urlparse, Cookie ++import re, urllib.parse, http.cookies + + class Error: + '''Handles a specific cookie error. 
+@@ -33,7 +33,7 @@ def parse_cookie(text, qparmre=re.compile( + # We'll simply bail without raising an error + # if the cookie is invalid. + return result +- if not result.has_key(name): ++ if name not in result: + result[name]=value + return result + +@@ -45,13 +45,12 @@ def decodeCookies(url, server, headers, cookies): + http://www.ietf.org/rfc/rfc2965.txt + ''' + # the path of the request URL up to, but not including, the right-most / +- request_path = urlparse.urlparse(url)[2] ++ request_path = urllib.parse.urlparse(url)[2] + if len(request_path) > 1 and request_path[-1] == '/': + request_path = request_path[:-1] + +- hdrcookies = Cookie.SimpleCookie("\n".join(map(lambda x: x.strip(), +- headers.getallmatchingheaders('set-cookie')))) +- for cookie in hdrcookies.values(): ++ hdrcookies = http.cookies.SimpleCookie("\n".join([x.strip() for x in headers.getallmatchingheaders('set-cookie')])) ++ for cookie in list(hdrcookies.values()): + # XXX: there doesn't seem to be a way to determine if the + # cookie was set or defaulted to an empty string :( + if cookie['domain']: +@@ -60,7 +59,7 @@ def decodeCookies(url, server, headers, cookies): + # reject if The value for the Domain attribute contains no + # embedded dots or does not start with a dot. + if '.' not in domain: +- raise Error, 'Cookie domain "%s" has no "."'%domain ++ raise Error('Cookie domain "%s" has no "."'%domain) + if domain[0] != '.': + # per RFC2965 cookie domains with no leading '.' 
will have + # one added +@@ -73,16 +72,16 @@ def decodeCookies(url, server, headers, cookies): + # but not: + # - someexample.com + if not server.endswith(domain) and domain[1:] != server: +- raise Error, 'Cookie domain "%s" doesn\'t match '\ +- 'request host "%s"'%(domain, server) ++ raise Error('Cookie domain "%s" doesn\'t match '\ ++ 'request host "%s"'%(domain, server)) + # reject if the request-host is a FQDN (not IP address) and + # has the form HD, where D is the value of the Domain + # attribute, and H is a string that contains one or more dots. + if re.search(r'[a-zA-Z]', server): + H = server[:-len(domain)] + if '.' in H: +- raise Error, 'Cookie domain "%s" too short '\ +- 'for request host "%s"'%(domain, server) ++ raise Error('Cookie domain "%s" too short '\ ++ 'for request host "%s"'%(domain, server)) + else: + domain = server + +@@ -92,8 +91,8 @@ def decodeCookies(url, server, headers, cookies): + # (noting that empty request path and '/' are often synonymous, yay) + if not (request_path.startswith(path) or (request_path == '' and + cookie['path'] == '/')): +- raise Error, 'Cookie path "%s" doesn\'t match '\ +- 'request url "%s"'%(path, request_path) ++ raise Error('Cookie path "%s" doesn\'t match '\ ++ 'request url "%s"'%(path, request_path)) + + bydom = cookies.setdefault(domain, {}) + bypath = bydom.setdefault(path, {}) +--- webunit/HTMLParser.py.orig 2009-06-05 16:30:44 UTC ++++ webunit/HTMLParser.py +@@ -183,7 +183,7 @@ class HTMLParser: + else: + if i < n-1: + raise HTMLParseError( +- "invalid '<' construct: %s" % `rawdata[i:i+2]`, ++ "invalid '<' construct: %s" % repr(rawdata[i:i+2]), + self.getpos()) + k = -1 + if k < 0: +@@ -274,7 +274,7 @@ class HTMLParser: + j = m.end() + else: + raise HTMLParseError( +- "unexpected char in declaration: %s" % `rawdata[j]`, ++ "unexpected char in declaration: %s" % repr(rawdata[j]), + self.getpos()) + return -1 # incomplete + +@@ -330,7 +330,7 @@ class HTMLParser: + else: + offset = offset + 
len(self.__starttag_text) + raise HTMLParseError("junk characters in start tag: %s" +- % `rawdata[k:endpos][:20]`, ++ % repr(rawdata[k:endpos][:20]), + (lineno, offset)) + if end[-2:] == '/>': + # XHTML-style empty tag: +@@ -384,7 +384,7 @@ class HTMLParser: + j = match.end() + match = endtagfind.match(rawdata, i) # + if not match: +- raise HTMLParseError("bad end tag: %s" % `rawdata[i:j]`, ++ raise HTMLParseError("bad end tag: %s" % repr(rawdata[i:j]), + self.getpos()) + tag = match.group(1) + self.handle_endtag(string.lower(tag)) +--- webunit/SimpleDOM.py.orig 2009-06-05 16:30:44 UTC ++++ webunit/SimpleDOM.py +@@ -35,8 +35,8 @@ Simple usage: + import sys, string + + # NOTE this is using a modified HTMLParser +-from HTMLParser import HTMLParser, HTMLParseError +-from utility import Upload ++from .HTMLParser import HTMLParser, HTMLParseError ++from .utility import Upload + + BOOLEAN_HTML_ATTRS = [ + # List of Boolean attributes in HTML that may be given in +@@ -139,7 +139,7 @@ class SimpleDOMNode: + for entry in l: + if hasattr(entry, 'id') and entry.id == id: + return entry +- raise ValueError, 'No %r with id %r'%(name, id) ++ raise ValueError('No %r with id %r'%(name, id)) + + def getByNameFlat(self, name): + '''Return all nodes of type "name" from the contents of this node. 
+@@ -182,21 +182,21 @@ class SimpleDOMNode: + return self.getContents()[item] + + def hasattr(self, attr): +- return self.__dict__['__attributes'].has_key(attr) ++ return attr in self.__dict__['__attributes'] + + def getattr(self, attr, default=_marker): +- if self.__dict__['__attributes'].has_key(attr): ++ if attr in self.__dict__['__attributes']: + return self.__dict__['__attributes'][attr] + if default is _marker: +- raise AttributeError, attr ++ raise AttributeError(attr) + return default + + def __getattr__(self, attr): +- if self.__dict__['__attributes'].has_key(attr): ++ if attr in self.__dict__['__attributes']: + return self.__dict__['__attributes'][attr] +- if self.__dict__.has_key(attr): ++ if attr in self.__dict__: + return self.__dict__[attr] +- raise AttributeError, attr ++ raise AttributeError(attr) + + def __len__(self): + return len(self.getContents()) +@@ -209,7 +209,7 @@ class SimpleDOMNode: + + def __str__(self): + attrs = [] +- for attr in self.__dict__['__attributes'].items(): ++ for attr in list(self.__dict__['__attributes'].items()): + if attr[0] in BOOLEAN_HTML_ATTRS: + attrs.append(attr[0]) + else: +@@ -339,8 +339,8 @@ class SimpleDOMParser(HTMLParser): + + def handle_starttag(self, tag, attrs): + if self.__debug: +- print '\n>handle_starttag', tag +- print self.tagstack ++ print('\n>handle_starttag', tag) ++ print(self.tagstack) + self.close_para_tags(tag) + self.tagstack.append(tag) + d = {} +@@ -352,8 +352,8 @@ class SimpleDOMParser(HTMLParser): + + def handle_startendtag(self, tag, attrs): + if self.__debug: +- print '> etc. 
in the source is an error + raise EmptyTagError(tag, self.getpos()) +@@ -375,7 +375,7 @@ class SimpleDOMParser(HTMLParser): + if tag in EMPTY_HTML_TAGS: + return + close_to = -1 +- if BLOCK_CLOSING_TAG_MAP.has_key(tag): ++ if tag in BLOCK_CLOSING_TAG_MAP: + blocks_to_close = BLOCK_CLOSING_TAG_MAP[tag] + for i in range(len(self.tagstack)): + t = self.tagstack[i] +@@ -404,8 +404,8 @@ class SimpleDOMParser(HTMLParser): + + def implied_endtag(self, tag, implied): + if self.__debug: +- print '. + +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + import re + import time + import sys +-from urllib import quote_plus, _is_unicode ++from urllib.parse import quote_plus + try: + from poster.encode import multipart_encode + canupload = True +@@ -32,7 +32,7 @@ except: + import simplejson as json + try: + import gzip +- import StringIO ++ import io + except: + gzip = False + +@@ -80,8 +80,8 @@ class APIRequest: + self.headers['Accept-Encoding'] = 'gzip' + self.wiki = wiki + self.response = False +- self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(wiki.cookies)) +- self.request = urllib2.Request(self.wiki.apibase, self.encodeddata, self.headers) ++ self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(wiki.cookies)) ++ self.request = urllib.request.Request(self.wiki.apibase, self.encodeddata, self.headers) + + def setMultipart(self, multipart=True): + """Enable multipart data transfer, required for file uploads.""" +@@ -125,7 +125,7 @@ class APIRequest: + self.encodeddata = urlencode(self.data, 1) + self.headers['Content-Length'] = len(self.encodeddata) + self.headers['Content-Type'] = "application/x-www-form-urlencoded" +- self.request = urllib2.Request(self.wiki.apibase, self.encodeddata, self.headers) ++ self.request = urllib.request.Request(self.wiki.apibase, self.encodeddata, self.headers) + + def query(self, querycontinue=True): + """Actually do the query here and return usable stuff +@@ -152,14 +152,14 @@ class 
APIRequest: + total = initialdata + res = initialdata + params = self.data +- numkeys = len(res['query-continue'].keys()) ++ numkeys = len(list(res['query-continue'].keys())) + while numkeys > 0: + key1 = '' + key2 = '' +- possiblecontinues = res['query-continue'].keys() ++ possiblecontinues = list(res['query-continue'].keys()) + if len(possiblecontinues) == 1: + key1 = possiblecontinues[0] +- keylist = res['query-continue'][key1].keys() ++ keylist = list(res['query-continue'][key1].keys()) + if len(keylist) == 1: + key2 = keylist[0] + else: +@@ -171,7 +171,7 @@ class APIRequest: + key2 = keylist[0] + else: + for posskey in possiblecontinues: +- keylist = res['query-continue'][posskey].keys() ++ keylist = list(res['query-continue'][posskey].keys()) + for key in keylist: + if len(key) < 11: + key1 = posskey +@@ -181,7 +181,7 @@ class APIRequest: + break + else: + key1 = possiblecontinues[0] +- key2 = res['query-continue'][key1].keys()[0] ++ key2 = list(res['query-continue'][key1].keys())[0] + if isinstance(res['query-continue'][key1][key2], int): + cont = res['query-continue'][key1][key2] + else: +@@ -198,7 +198,7 @@ class APIRequest: + for type in possiblecontinues: + total = resultCombine(type, total, res) + if 'query-continue' in res: +- numkeys = len(res['query-continue'].keys()) ++ numkeys = len(list(res['query-continue'].keys())) + else: + numkeys = 0 + return total +@@ -216,11 +216,11 @@ class APIRequest: + if gzip: + encoding = self.response.get('Content-encoding') + if encoding in ('gzip', 'x-gzip'): +- data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data.read())) +- except catcherror, exc: ++ data = gzip.GzipFile('', 'rb', 9, io.StringIO(data.read())) ++ except catcherror as exc: + errname = sys.exc_info()[0].__name__ + errinfo = exc +- print("%s: %s trying request again in %d seconds" % (errname, errinfo, self.sleep)) ++ print(("%s: %s trying request again in %d seconds" % (errname, errinfo, self.sleep))) + time.sleep(self.sleep+0.5) + self.sleep+=5 + 
return data +@@ -234,10 +234,10 @@ class APIRequest: + content = None + if isinstance(parsed, dict): + content = APIResult(parsed) +- content.response = self.response.items() ++ content.response = list(self.response.items()) + elif isinstance(parsed, list): + content = APIListResult(parsed) +- content.response = self.response.items() ++ content.response = list(self.response.items()) + else: + content = parsed + if 'error' in content: +@@ -246,7 +246,7 @@ class APIRequest: + lagtime = int(re.search("(\d+) seconds", content['error']['info']).group(1)) + if lagtime > self.wiki.maxwaittime: + lagtime = self.wiki.maxwaittime +- print("Server lag, sleeping for "+str(lagtime)+" seconds") ++ print(("Server lag, sleeping for "+str(lagtime)+" seconds")) + maxlag = True + time.sleep(int(lagtime)+0.5) + return False +@@ -254,7 +254,7 @@ class APIRequest: + data.seek(0) + if "MediaWiki API is not enabled for this site. Add the following line to your LocalSettings.php
$wgEnableAPI=true;
" in data.read(): + raise APIDisabled("The API is not enabled on this site") +- print "Invalid JSON, trying request again" ++ print("Invalid JSON, trying request again") + # FIXME: Would be nice if this didn't just go forever if its never going to work + return False + return content +@@ -276,7 +276,7 @@ def resultCombine(type, old, new): + if type in new['query']: # Basic list, easy + ret['query'][type].extend(new['query'][type]) + else: # Else its some sort of prop=thing and/or a generator query +- for key in new['query']['pages'].keys(): # Go through each page ++ for key in list(new['query']['pages'].keys()): # Go through each page + if not key in old['query']['pages']: # if it only exists in the new one + ret['query']['pages'][key] = new['query']['pages'][key] # add it to the list + else: +@@ -300,7 +300,7 @@ def urlencode(query,doseq=0): + """ + if hasattr(query,"items"): + # mapping objects +- query = query.items() ++ query = list(query.items()) + else: + # it's a bother at times that strings and string-like objects are + # sequences... +@@ -315,7 +315,7 @@ def urlencode(query,doseq=0): + # preserved for consistency + except TypeError: + ty,va,tb = sys.exc_info() +- raise TypeError, "not a valid non-string sequence or mapping object", tb ++ raise TypeError("not a valid non-string sequence or mapping object").with_traceback(tb) + + l = [] + if not doseq: +--- wikitools/wiki.py.orig 2010-04-14 21:48:10 UTC ++++ wikitools/wiki.py +@@ -15,15 +15,15 @@ + # You should have received a copy of the GNU General Public License + # along with wikitools. If not, see . + +-import cookielib +-import api +-import urllib ++import http.cookiejar ++from . 
import api ++import urllib.request, urllib.parse, urllib.error + import re + import time + import os +-from urlparse import urlparse ++from urllib.parse import urlparse + try: +- import cPickle as pickle ++ import pickle as pickle + except: + import pickle + +@@ -112,10 +112,10 @@ class Wiki: + for ns in nsaliasdata: + self.NSaliases[ns['*']] = ns['id'] + if not 'writeapi' in sidata: +- print "WARNING: Write-API not enabled, you will not be able to edit" ++ print("WARNING: Write-API not enabled, you will not be able to edit") + version = re.search("\d\.(\d\d)", self.siteinfo['generator']) + if not int(version.group(1)) >= 13: # Will this even work on 13? +- print "WARNING: Some features may not work on older versions of MediaWiki" ++ print("WARNING: Some features may not work on older versions of MediaWiki") + return self + + def login(self, username, password=False, remember=False, force=False, verify=True, domain=None): +@@ -145,10 +145,10 @@ class Wiki: + password = getpass() + def loginerror(info): + try: +- print info['login']['result'] ++ print(info['login']['result']) + except: +- print info['error']['code'] +- print info['error']['info'] ++ print(info['error']['code']) ++ print(info['error']['info']) + return False + data = { + "action" : "login", +@@ -286,11 +286,11 @@ class Wiki: + class CookiesExpired(WikiError): + """Cookies are expired, needs to be an exception so login() will use the API instead""" + +-class WikiCookieJar(cookielib.FileCookieJar): ++class WikiCookieJar(http.cookiejar.FileCookieJar): + def save(self, site, filename=None, ignore_discard=False, ignore_expires=False): + if not filename: + filename = self.filename +- old_umask = os.umask(0077) ++ old_umask = os.umask(0o077) + f = open(filename, 'w') + f.write('') + content = '' +@@ -325,6 +325,6 @@ class WikiCookieJar(cookielib.FileCookieJar): + if not ignore_expires and cook.is_expired: + continue + self.set_cookie(cook) +- exec sitedata ++ exec(sitedata) + f.close() + diff --git 
a/www/qt5-webengine/Makefile b/www/qt5-webengine/Makefile index 37916ea6f08..1eda38d1b71 100644 --- a/www/qt5-webengine/Makefile +++ b/www/qt5-webengine/Makefile @@ -16,7 +16,7 @@ PORTNAME= webengine DISTVERSION= ${QT5_VERSION} -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= www PKGNAMEPREFIX= qt5- diff --git a/www/qt5-webkit/Makefile b/www/qt5-webkit/Makefile index 3e34a203cfc..4356e92ad60 100644 --- a/www/qt5-webkit/Makefile +++ b/www/qt5-webkit/Makefile @@ -1,6 +1,6 @@ PORTNAME= webkit DISTVERSION= 5.212.0-alpha4 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= www MASTER_SITES= https://github.com/qt${PORTNAME}/qt${PORTNAME}/releases/download/${DISTNAME}/ PKGNAMEPREFIX= qt5- diff --git a/www/redmine4/Makefile b/www/redmine4/Makefile index 6e92962b25e..42b1ebe1724 100644 --- a/www/redmine4/Makefile +++ b/www/redmine4/Makefile @@ -1,5 +1,6 @@ PORTNAME= redmine PORTVERSION= 4.1.6 +PORTREVISION= 1 CATEGORIES= www MASTER_SITES= https://www.redmine.org/releases/ PKGNAMESUFFIX= 4 @@ -13,7 +14,7 @@ LICENSE_FILE= ${WRKSRC}/doc/COPYING PORTSCOUT= limit:^4\.1\. 
RUN_DEPENDS= rubygem-bundler>=1.5.0:sysutils/rubygem-bundler \ - rubygem-rails52>=5.2.5<5.3:www/rubygem-rails52 \ + rubygem-rails52>=5.2.6<5.3:www/rubygem-rails52 \ rubygem-rouge312>=3.12.0:textproc/rubygem-rouge312 \ rubygem-request_store14>=1.0.5:devel/rubygem-request_store14 \ rubygem-mini_mime>=1.0.1:mail/rubygem-mini_mime \ diff --git a/www/redmine4/files/patch-Gemfile b/www/redmine4/files/patch-Gemfile index 4f2e3d9f2ce..2ff2386f45d 100644 --- a/www/redmine4/files/patch-Gemfile +++ b/www/redmine4/files/patch-Gemfile @@ -8,7 +8,7 @@ gem "bundler", ">= 1.5.0" -gem 'rails', '5.2.6.2' -+gem 'rails', '5.2.6' ++gem 'rails', '~> 5.2.6' gem 'sprockets', '~> 3.7.2' if RUBY_VERSION < '2.5' gem 'globalid', '~> 0.4.2' if Gem.ruby_version < Gem::Version.new('2.6.0') gem "rouge", "~> 3.12.0" diff --git a/www/redmine42/Makefile b/www/redmine42/Makefile index 70da89c44f9..15436ce1230 100644 --- a/www/redmine42/Makefile +++ b/www/redmine42/Makefile @@ -1,5 +1,6 @@ PORTNAME= redmine PORTVERSION= 4.2.4 +PORTREVISION= 1 CATEGORIES= www MASTER_SITES= https://www.redmine.org/releases/ PKGNAMESUFFIX= 42 @@ -11,7 +12,7 @@ LICENSE= GPLv2 LICENSE_FILE= ${WRKSRC}/doc/COPYING RUN_DEPENDS= rubygem-bundler>=1.5.0:sysutils/rubygem-bundler \ - rubygem-rails52>=5.2.4.5:www/rubygem-rails52 \ + rubygem-rails52>=5.2.6<5.3:www/rubygem-rails52 \ rubygem-rouge>=3.26.0:textproc/rubygem-rouge \ rubygem-request_store>=1.5.0:devel/rubygem-request_store \ rubygem-mini_mime>=1.0.1:mail/rubygem-mini_mime \ diff --git a/www/redmine42/files/patch-Gemfile b/www/redmine42/files/patch-Gemfile index 7921212f974..96d38144291 100644 --- a/www/redmine42/files/patch-Gemfile +++ b/www/redmine42/files/patch-Gemfile @@ -5,7 +5,7 @@ gem 'bundler', '>= 1.12.0' -gem 'rails', '5.2.6.2' -+gem 'rails', '5.2.6' ++gem 'rails', '~> 5.2.6' gem 'sprockets', '~> 3.7.2' if RUBY_VERSION < '2.5' gem 'globalid', '~> 0.4.2' if Gem.ruby_version < Gem::Version.new('2.6.0') -gem 'rouge', '~> 3.26.0' diff --git a/www/rssroll/Makefile 
b/www/rssroll/Makefile index f00bd9618f0..f41bc2e6b86 100644 --- a/www/rssroll/Makefile +++ b/www/rssroll/Makefile @@ -1,5 +1,6 @@ PORTNAME= rssroll PORTVERSION= 0.6.2 +PORTREVISION= 1 CATEGORIES= www MAINTAINER= koue@chaosophia.net diff --git a/www/rsstool/Makefile b/www/rsstool/Makefile index 3ad9816a585..bfdb025ddb1 100644 --- a/www/rsstool/Makefile +++ b/www/rsstool/Makefile @@ -2,7 +2,7 @@ PORTNAME= rsstool PORTVERSION= 1.0.0 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= www MASTER_SITES= BERLIOS DISTNAME= ${PORTNAME}-${PORTVERSION}-src diff --git a/www/rubygem-actioncable52/Makefile b/www/rubygem-actioncable52/Makefile index 22cdce7e2e0..369a80451b1 100644 --- a/www/rubygem-actioncable52/Makefile +++ b/www/rubygem-actioncable52/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= actioncable -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/www/rubygem-actioncable52/distinfo b/www/rubygem-actioncable52/distinfo index a31f4690bbb..6023d070ca1 100644 --- a/www/rubygem-actioncable52/distinfo +++ b/www/rubygem-actioncable52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298708 -SHA256 (rubygem/actioncable-5.2.6.gem) = d523ec54bac8d37f587838aa5c502b87e6900c96fbc3b51dffc029f83845e7e8 -SIZE (rubygem/actioncable-5.2.6.gem) = 41472 +TIMESTAMP = 1647264900 +SHA256 (rubygem/actioncable-5.2.7.gem) = 91782cc5866f6e26ff2c39e2fba02c68da02617e99d3e2f229f238e181a5e588 +SIZE (rubygem/actioncable-5.2.7.gem) = 41472 diff --git a/www/rubygem-actioncable60/Makefile b/www/rubygem-actioncable60/Makefile index 3f5e636cef1..7fe31cf4d1c 100644 --- a/www/rubygem-actioncable60/Makefile +++ b/www/rubygem-actioncable60/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= actioncable -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/www/rubygem-actioncable60/distinfo b/www/rubygem-actioncable60/distinfo index 
47a9e629b7e..7e8f87adc96 100644 --- a/www/rubygem-actioncable60/distinfo +++ b/www/rubygem-actioncable60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058920 -SHA256 (rubygem/actioncable-6.0.4.6.gem) = 8df46913b7e60b88996b9277d009535eb666f1438c97b7410eaf70c18d556e27 -SIZE (rubygem/actioncable-6.0.4.6.gem) = 42496 +TIMESTAMP = 1647264926 +SHA256 (rubygem/actioncable-6.0.4.7.gem) = 7f4291d285a23e8cf92b9feba564332044ab41f351336e7256ba353d21f559b3 +SIZE (rubygem/actioncable-6.0.4.7.gem) = 42496 diff --git a/www/rubygem-actioncable61/Makefile b/www/rubygem-actioncable61/Makefile index 3508d0b45c6..929cfe1101d 100644 --- a/www/rubygem-actioncable61/Makefile +++ b/www/rubygem-actioncable61/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= actioncable -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/www/rubygem-actioncable61/distinfo b/www/rubygem-actioncable61/distinfo index 480115de5a7..fbd7e9c36e2 100644 --- a/www/rubygem-actioncable61/distinfo +++ b/www/rubygem-actioncable61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058946 -SHA256 (rubygem/actioncable-6.1.4.6.gem) = f7c53f60af767680a8cdbdd8bd3c6b88aaca78b21210b9109a429f9607a46c29 -SIZE (rubygem/actioncable-6.1.4.6.gem) = 41984 +TIMESTAMP = 1647264952 +SHA256 (rubygem/actioncable-6.1.4.7.gem) = ede23e643ae700bfa10714d0f398a5ecc61213a3b7cc5ef76f8f505971aed94e +SIZE (rubygem/actioncable-6.1.4.7.gem) = 41984 diff --git a/www/rubygem-actioncable70/Makefile b/www/rubygem-actioncable70/Makefile index 53901103fa9..3568400e13a 100644 --- a/www/rubygem-actioncable70/Makefile +++ b/www/rubygem-actioncable70/Makefile @@ -1,7 +1,7 @@ # Created by: Sunpoet Po-Chuan Hsieh PORTNAME= actioncable -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/www/rubygem-actioncable70/distinfo b/www/rubygem-actioncable70/distinfo index 1d38e7d6db3..34d27eec2b8 100644 --- 
a/www/rubygem-actioncable70/distinfo +++ b/www/rubygem-actioncable70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058972 -SHA256 (rubygem/actioncable-7.0.2.gem) = 48409db96e6d788b4d95ead9a475dbb5e4b5b72b22a716a57fb83b3ac9c8f3af -SIZE (rubygem/actioncable-7.0.2.gem) = 44032 +TIMESTAMP = 1647264978 +SHA256 (rubygem/actioncable-7.0.2.3.gem) = 3861a59368daccdf77acaffbac4209f7f5b990dc2f5cee1f0cccef4c06904b15 +SIZE (rubygem/actioncable-7.0.2.3.gem) = 44032 diff --git a/www/rubygem-actionpack52/Makefile b/www/rubygem-actionpack52/Makefile index fd9ec49ddb2..dbff930a037 100644 --- a/www/rubygem-actionpack52/Makefile +++ b/www/rubygem-actionpack52/Makefile @@ -1,7 +1,7 @@ # Created by: Jonathan Weiss () PORTNAME= actionpack -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/www/rubygem-actionpack52/distinfo b/www/rubygem-actionpack52/distinfo index c60d900e57b..266ef031d0f 100644 --- a/www/rubygem-actionpack52/distinfo +++ b/www/rubygem-actionpack52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298710 -SHA256 (rubygem/actionpack-5.2.6.gem) = 1e8c541046f3f8e40cdfe7894a4c5f1da7b910630a82bd97b4cf0b870aa15d2c -SIZE (rubygem/actionpack-5.2.6.gem) = 214528 +TIMESTAMP = 1647264902 +SHA256 (rubygem/actionpack-5.2.7.gem) = 155ade3510ddb72719105a5b79bd3985ca0ae926ca72e0fdd81a5a0d2d67fa25 +SIZE (rubygem/actionpack-5.2.7.gem) = 214528 diff --git a/www/rubygem-actionpack60/Makefile b/www/rubygem-actionpack60/Makefile index 57559aafd75..41040d72aa5 100644 --- a/www/rubygem-actionpack60/Makefile +++ b/www/rubygem-actionpack60/Makefile @@ -1,7 +1,7 @@ # Created by: Jonathan Weiss () PORTNAME= actionpack -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/www/rubygem-actionpack60/distinfo b/www/rubygem-actionpack60/distinfo index aa24c0c27dc..e4a9b4d9cf4 100644 --- a/www/rubygem-actionpack60/distinfo +++ b/www/rubygem-actionpack60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 
1646058922 -SHA256 (rubygem/actionpack-6.0.4.6.gem) = d749a2b7c3e6a75c8f38417b2411a96c4b55ab6c0a95d09d4da67d1668b8ee96 -SIZE (rubygem/actionpack-6.0.4.6.gem) = 218624 +TIMESTAMP = 1647264928 +SHA256 (rubygem/actionpack-6.0.4.7.gem) = 65af491a0937e0ccdc982971c3e69dc33bbd345897b0c66f17c03ce3212dcc05 +SIZE (rubygem/actionpack-6.0.4.7.gem) = 218624 diff --git a/www/rubygem-actionpack61/Makefile b/www/rubygem-actionpack61/Makefile index 65883bcc080..85fe859ae7a 100644 --- a/www/rubygem-actionpack61/Makefile +++ b/www/rubygem-actionpack61/Makefile @@ -1,7 +1,7 @@ # Created by: Jonathan Weiss () PORTNAME= actionpack -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/www/rubygem-actionpack61/distinfo b/www/rubygem-actionpack61/distinfo index 8eddf1ed303..517caec3c1c 100644 --- a/www/rubygem-actionpack61/distinfo +++ b/www/rubygem-actionpack61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058948 -SHA256 (rubygem/actionpack-6.1.4.6.gem) = febc3ae0b7b1c6c7b4e6902d9e8bb7542d10deefd526e42ee064f371c1ee96b4 -SIZE (rubygem/actionpack-6.1.4.6.gem) = 225792 +TIMESTAMP = 1647264954 +SHA256 (rubygem/actionpack-6.1.4.7.gem) = 24a0d2d1c3850e0b79ca18d25b09755ad2c65099374a98dbea80459e64152033 +SIZE (rubygem/actionpack-6.1.4.7.gem) = 225792 diff --git a/www/rubygem-actionpack70/Makefile b/www/rubygem-actionpack70/Makefile index 7c08e53cd6a..5ef0ed73101 100644 --- a/www/rubygem-actionpack70/Makefile +++ b/www/rubygem-actionpack70/Makefile @@ -1,5 +1,5 @@ PORTNAME= actionpack -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/www/rubygem-actionpack70/distinfo b/www/rubygem-actionpack70/distinfo index e94fc2c10aa..6a851018fd3 100644 --- a/www/rubygem-actionpack70/distinfo +++ b/www/rubygem-actionpack70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058974 -SHA256 (rubygem/actionpack-7.0.2.gem) = 8f7568d692376b89bd45004fb37d107ebf089f04eb49c7f9ee72e4798bfb9929 -SIZE 
(rubygem/actionpack-7.0.2.gem) = 228864 +TIMESTAMP = 1647264980 +SHA256 (rubygem/actionpack-7.0.2.3.gem) = 2b9bd31274dacecf62c73120154c05e14ba0060d3e310d82c03f98e7a63263f9 +SIZE (rubygem/actionpack-7.0.2.3.gem) = 228864 diff --git a/www/rubygem-cgi/Makefile b/www/rubygem-cgi/Makefile index 7f89223fcd4..91aa6983988 100644 --- a/www/rubygem-cgi/Makefile +++ b/www/rubygem-cgi/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= cgi -PORTVERSION= 0.3.1 +PORTVERSION= 0.3.2 CATEGORIES= www rubygems MASTER_SITES= RG @@ -17,6 +17,4 @@ USE_RUBY= yes CPE_VENDOR= ruby-lang -NO_ARCH= yes - .include diff --git a/www/rubygem-cgi/distinfo b/www/rubygem-cgi/distinfo index edf630c8cfb..542595d4270 100644 --- a/www/rubygem-cgi/distinfo +++ b/www/rubygem-cgi/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1637780483 -SHA256 (rubygem/cgi-0.3.1.gem) = 56032abb072ffaa66339ae35824aeab47a486e2e8f1b53681b1d056d5179c8ef -SIZE (rubygem/cgi-0.3.1.gem) = 37376 +TIMESTAMP = 1647264878 +SHA256 (rubygem/cgi-0.3.2.gem) = 92b22449ed44ead84c88a94013a06a6bff9a6fe0a73871c2b55ce7da06e9a827 +SIZE (rubygem/cgi-0.3.2.gem) = 38400 diff --git a/www/rubygem-jekyll/Makefile b/www/rubygem-jekyll/Makefile index 79e957ecaf4..825041c7542 100644 --- a/www/rubygem-jekyll/Makefile +++ b/www/rubygem-jekyll/Makefile @@ -1,7 +1,7 @@ # Created by: Peter Schuller PORTNAME= jekyll -PORTVERSION= 4.2.1 +PORTVERSION= 4.2.2 CATEGORIES= www rubygems MASTER_SITES= RG @@ -29,12 +29,12 @@ RUN_DEPENDS= rubygem-addressable>=2.4<3:www/rubygem-addressable \ USES= cpe gem USE_RUBY= yes -CONFLICTS_INSTALL= rubygem-jekyll3 - NO_ARCH= yes -CPE_VENDOR= jekyllrb - PLIST_FILES= bin/jekyll +CONFLICTS_INSTALL= rubygem-jekyll3 + +CPE_VENDOR= jekyllrb + .include diff --git a/www/rubygem-jekyll/distinfo b/www/rubygem-jekyll/distinfo index 08fb94fb465..56382544b2e 100644 --- a/www/rubygem-jekyll/distinfo +++ b/www/rubygem-jekyll/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632760452 -SHA256 (rubygem/jekyll-4.2.1.gem) = 
4fb1439ce770dc7d73af083bf14cff352b076ab763ba20e4bac8e9b94b0243fb -SIZE (rubygem/jekyll-4.2.1.gem) = 125440 +TIMESTAMP = 1647264880 +SHA256 (rubygem/jekyll-4.2.2.gem) = f7c2ec8fdc41e5565c1e0e20df3bd6896162a5c26d75a684bef3eb144f9bfb36 +SIZE (rubygem/jekyll-4.2.2.gem) = 124928 diff --git a/www/rubygem-jsbundling-rails/Makefile b/www/rubygem-jsbundling-rails/Makefile index a1ebbcf0629..97b27f03eb4 100644 --- a/www/rubygem-jsbundling-rails/Makefile +++ b/www/rubygem-jsbundling-rails/Makefile @@ -1,7 +1,7 @@ # Created by: Po-Chuan Hsieh PORTNAME= jsbundling-rails -PORTVERSION= 1.0.1 +PORTVERSION= 1.0.2 CATEGORIES= www rubygems MASTER_SITES= RG diff --git a/www/rubygem-jsbundling-rails/distinfo b/www/rubygem-jsbundling-rails/distinfo index 5bf73f491e1..2946fbe39a5 100644 --- a/www/rubygem-jsbundling-rails/distinfo +++ b/www/rubygem-jsbundling-rails/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058890 -SHA256 (rubygem/jsbundling-rails-1.0.1.gem) = 2cf48c07b5c5c7f309e7035120e325392be1cef347d368b46dd94600084359df -SIZE (rubygem/jsbundling-rails-1.0.1.gem) = 9216 +TIMESTAMP = 1647264882 +SHA256 (rubygem/jsbundling-rails-1.0.2.gem) = 75898caa889968ebeb6d38dd31d4129a33eab3fdf7644f1cc3e47717aea8dbf4 +SIZE (rubygem/jsbundling-rails-1.0.2.gem) = 9216 diff --git a/www/rubygem-rails52/Makefile b/www/rubygem-rails52/Makefile index 76048f74963..950b8d26d84 100644 --- a/www/rubygem-rails52/Makefile +++ b/www/rubygem-rails52/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= rails -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/www/rubygem-rails52/distinfo b/www/rubygem-rails52/distinfo index e216a311675..dc108700174 100644 --- a/www/rubygem-rails52/distinfo +++ b/www/rubygem-rails52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298712 -SHA256 (rubygem/rails-5.2.6.gem) = ccdef9f57c2c0f67faae9d5b6d155f5e61b033f944499ea09d6383e6626d27dc -SIZE (rubygem/rails-5.2.6.gem) = 6656 +TIMESTAMP = 1647264904 +SHA256 
(rubygem/rails-5.2.7.gem) = b650a8f5a0a896555b4539b9f523024a64c605fb849de92affea803a9c6b0ee6 +SIZE (rubygem/rails-5.2.7.gem) = 6656 diff --git a/www/rubygem-rails60/Makefile b/www/rubygem-rails60/Makefile index c2b663c8cd7..687b54cf97c 100644 --- a/www/rubygem-rails60/Makefile +++ b/www/rubygem-rails60/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= rails -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/www/rubygem-rails60/distinfo b/www/rubygem-rails60/distinfo index de3b1398457..936f5c6566a 100644 --- a/www/rubygem-rails60/distinfo +++ b/www/rubygem-rails60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058924 -SHA256 (rubygem/rails-6.0.4.6.gem) = 6034231cb496dcd4467fccfa2792ee921347eba1d666b866f8f3b530b72b1620 -SIZE (rubygem/rails-6.0.4.6.gem) = 6656 +TIMESTAMP = 1647264930 +SHA256 (rubygem/rails-6.0.4.7.gem) = cb2eccd564ac30e9efccc33f7bab0408cdaffc0d862ef41be275e8a3fa4a2be5 +SIZE (rubygem/rails-6.0.4.7.gem) = 6656 diff --git a/www/rubygem-rails61-node16/Makefile b/www/rubygem-rails61-node16/Makefile index bc60d3f8573..c39103ffd3b 100644 --- a/www/rubygem-rails61-node16/Makefile +++ b/www/rubygem-rails61-node16/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= rails -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61-node16 diff --git a/www/rubygem-rails61-node16/distinfo b/www/rubygem-rails61-node16/distinfo index cdd0f34bac3..97c87bcdaef 100644 --- a/www/rubygem-rails61-node16/distinfo +++ b/www/rubygem-rails61-node16/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058896 -SHA256 (rubygem/rails-6.1.4.6.gem) = eaa3d5cf0678a85eab88c4430aa49dc19fd79511a539aa006738f29ac5ebbb06 -SIZE (rubygem/rails-6.1.4.6.gem) = 6656 +TIMESTAMP = 1647264884 +SHA256 (rubygem/rails-6.1.4.7.gem) = c58a13335748caa55182e69afac033d864c84c2d1e7e891b754b56ea0de0974f +SIZE (rubygem/rails-6.1.4.7.gem) = 6656 diff --git a/www/rubygem-rails61/Makefile 
b/www/rubygem-rails61/Makefile index 15cdbb48f6c..181d533a5c6 100644 --- a/www/rubygem-rails61/Makefile +++ b/www/rubygem-rails61/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= rails -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/www/rubygem-rails61/distinfo b/www/rubygem-rails61/distinfo index adfe61c74ea..b0b2d6228ab 100644 --- a/www/rubygem-rails61/distinfo +++ b/www/rubygem-rails61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058950 -SHA256 (rubygem/rails-6.1.4.6.gem) = eaa3d5cf0678a85eab88c4430aa49dc19fd79511a539aa006738f29ac5ebbb06 -SIZE (rubygem/rails-6.1.4.6.gem) = 6656 +TIMESTAMP = 1647264956 +SHA256 (rubygem/rails-6.1.4.7.gem) = c58a13335748caa55182e69afac033d864c84c2d1e7e891b754b56ea0de0974f +SIZE (rubygem/rails-6.1.4.7.gem) = 6656 diff --git a/www/rubygem-rails70/Makefile b/www/rubygem-rails70/Makefile index b0c15052b53..30355307ef3 100644 --- a/www/rubygem-rails70/Makefile +++ b/www/rubygem-rails70/Makefile @@ -1,5 +1,5 @@ PORTNAME= rails -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/www/rubygem-rails70/distinfo b/www/rubygem-rails70/distinfo index 5977acf255d..bd4b173c26c 100644 --- a/www/rubygem-rails70/distinfo +++ b/www/rubygem-rails70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058976 -SHA256 (rubygem/rails-7.0.2.gem) = ef82869adc909aa7f318519d6b3e5c930a29f507e730e8b5af532d8f14d2ab72 -SIZE (rubygem/rails-7.0.2.gem) = 6656 +TIMESTAMP = 1647264982 +SHA256 (rubygem/rails-7.0.2.3.gem) = ee4e24075c72dec6e02e3fcddec86399c2b4eb0466efe4ccb5f78f96d3daa283 +SIZE (rubygem/rails-7.0.2.3.gem) = 6656 diff --git a/www/rubygem-railties52/Makefile b/www/rubygem-railties52/Makefile index 8dd93d3385e..919fa3bc098 100644 --- a/www/rubygem-railties52/Makefile +++ b/www/rubygem-railties52/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= railties -PORTVERSION= 5.2.6 +PORTVERSION= 5.2.7 CATEGORIES= www 
rubygems MASTER_SITES= RG PKGNAMESUFFIX= 52 diff --git a/www/rubygem-railties52/distinfo b/www/rubygem-railties52/distinfo index cfd5a58c6dc..672af886f15 100644 --- a/www/rubygem-railties52/distinfo +++ b/www/rubygem-railties52/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1620298714 -SHA256 (rubygem/railties-5.2.6.gem) = 93202a5b7a3bf0344182271da0352b02cef7317a0a4de8ebba0f8eb0bb3b4967 -SIZE (rubygem/railties-5.2.6.gem) = 236544 +TIMESTAMP = 1647264906 +SHA256 (rubygem/railties-5.2.7.gem) = 8bf2766310938cf67ca78bb5c48bcb36bd70a59899ff8d8bfe4ca129ead47746 +SIZE (rubygem/railties-5.2.7.gem) = 236544 diff --git a/www/rubygem-railties60/Makefile b/www/rubygem-railties60/Makefile index a22f5fbcfca..cf067268a5e 100644 --- a/www/rubygem-railties60/Makefile +++ b/www/rubygem-railties60/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= railties -PORTVERSION= 6.0.4.6 +PORTVERSION= 6.0.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 60 diff --git a/www/rubygem-railties60/distinfo b/www/rubygem-railties60/distinfo index c4b8feed245..c2cc67590c7 100644 --- a/www/rubygem-railties60/distinfo +++ b/www/rubygem-railties60/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058926 -SHA256 (rubygem/railties-6.0.4.6.gem) = caadaed53ff9d63125ee32d7e90867d25eaafba8b9b9b0e7dc80e7429d554bbe -SIZE (rubygem/railties-6.0.4.6.gem) = 455680 +TIMESTAMP = 1647264932 +SHA256 (rubygem/railties-6.0.4.7.gem) = 433161404cc81e8a849d735a32a57ecd3cbb88fb434581c0534a6a72506b7c0e +SIZE (rubygem/railties-6.0.4.7.gem) = 455680 diff --git a/www/rubygem-railties61/Makefile b/www/rubygem-railties61/Makefile index e2fbb2e9f56..58abf2457cd 100644 --- a/www/rubygem-railties61/Makefile +++ b/www/rubygem-railties61/Makefile @@ -1,7 +1,7 @@ # Created by: Johannes Meixner PORTNAME= railties -PORTVERSION= 6.1.4.6 +PORTVERSION= 6.1.4.7 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 61 diff --git a/www/rubygem-railties61/distinfo b/www/rubygem-railties61/distinfo index 4847160041a..2f38f09f220 
100644 --- a/www/rubygem-railties61/distinfo +++ b/www/rubygem-railties61/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058952 -SHA256 (rubygem/railties-6.1.4.6.gem) = 0b96224a19212211e2571106c5648d2a0c134b4436567c92c8d0eb6ad7e5d0ea -SIZE (rubygem/railties-6.1.4.6.gem) = 457216 +TIMESTAMP = 1647264958 +SHA256 (rubygem/railties-6.1.4.7.gem) = cd370aac22990c89270e4d9f5bdad379447bce227ec5562f5bead58637dcd03c +SIZE (rubygem/railties-6.1.4.7.gem) = 457216 diff --git a/www/rubygem-railties70/Makefile b/www/rubygem-railties70/Makefile index b0cc4849198..c61d9dc7801 100644 --- a/www/rubygem-railties70/Makefile +++ b/www/rubygem-railties70/Makefile @@ -1,5 +1,5 @@ PORTNAME= railties -PORTVERSION= 7.0.2 +PORTVERSION= 7.0.2.3 CATEGORIES= www rubygems MASTER_SITES= RG PKGNAMESUFFIX= 70 diff --git a/www/rubygem-railties70/distinfo b/www/rubygem-railties70/distinfo index f9ddc100f5e..b3e464b7017 100644 --- a/www/rubygem-railties70/distinfo +++ b/www/rubygem-railties70/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1646058978 -SHA256 (rubygem/railties-7.0.2.gem) = e0cbd3a75038a5d1b3c2bd5f91cf0e6b6edf41fcb63b87270d4be9287f25fc30 -SIZE (rubygem/railties-7.0.2.gem) = 159232 +TIMESTAMP = 1647264984 +SHA256 (rubygem/railties-7.0.2.3.gem) = a4ff0c937da39c1a088574b5979fb5d300c24a2e55ab0cf6c086294895c35d8a +SIZE (rubygem/railties-7.0.2.3.gem) = 159232 diff --git a/www/sitecopy/Makefile b/www/sitecopy/Makefile index 16ca167c3f0..dcb50b4ef81 100644 --- a/www/sitecopy/Makefile +++ b/www/sitecopy/Makefile @@ -2,7 +2,7 @@ PORTNAME= sitecopy PORTVERSION= 0.16.6 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= www MASTER_SITES= http://www.manyfish.co.uk/${PORTNAME}/ \ GENTOO diff --git a/www/threejs/Makefile b/www/threejs/Makefile index 03bd8088aa8..ffd9ac089d0 100644 --- a/www/threejs/Makefile +++ b/www/threejs/Makefile @@ -1,7 +1,7 @@ # Created by: thierry@pompo.net PORTNAME= three.js -PORTVERSION= 138 +PORTVERSION= 139 DISTVERSIONPREFIX= r CATEGORIES= www diff --git a/www/threejs/distinfo 
b/www/threejs/distinfo index e01604b1ecd..f60529a28a5 100644 --- a/www/threejs/distinfo +++ b/www/threejs/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1645724002 -SHA256 (mrdoob-three.js-r138_GH0.tar.gz) = 47dd6077dc683333493301bbd16402c2ec6b709616f535cdf04427f50012f4d7 -SIZE (mrdoob-three.js-r138_GH0.tar.gz) = 311183503 +TIMESTAMP = 1648233332 +SHA256 (mrdoob-three.js-r139_GH0.tar.gz) = cb4891584fdb929450ef64b1b7a16ad1058e4f8e2cbc8fb2275a1542f9b73461 +SIZE (mrdoob-three.js-r139_GH0.tar.gz) = 311201756 diff --git a/www/threejs/pkg-plist b/www/threejs/pkg-plist index 3caace615e8..6be4f4dfb40 100644 --- a/www/threejs/pkg-plist +++ b/www/threejs/pkg-plist @@ -63,6 +63,7 @@ %%WWWDIR%%/extras/curves/QuadraticBezierCurve3.js %%WWWDIR%%/extras/curves/SplineCurve.js %%WWWDIR%%/geometries/BoxGeometry.js +%%WWWDIR%%/geometries/CapsuleGeometry.js %%WWWDIR%%/geometries/CircleGeometry.js %%WWWDIR%%/geometries/ConeGeometry.js %%WWWDIR%%/geometries/CylinderGeometry.js @@ -148,6 +149,7 @@ %%WWWDIR%%/math/Box2.js %%WWWDIR%%/math/Box3.js %%WWWDIR%%/math/Color.js +%%WWWDIR%%/math/ColorManagement.js %%WWWDIR%%/math/Cylindrical.js %%WWWDIR%%/math/Euler.js %%WWWDIR%%/math/Frustum.js diff --git a/www/tidy-html5/Makefile b/www/tidy-html5/Makefile index 161a8179b79..d557e516092 100644 --- a/www/tidy-html5/Makefile +++ b/www/tidy-html5/Makefile @@ -2,6 +2,7 @@ PORTNAME= tidy-html5 PORTVERSION= 5.8.0 +PORTREVISION= 1 CATEGORIES= www MAINTAINER= thierry@FreeBSD.org diff --git a/www/trafficserver/Makefile b/www/trafficserver/Makefile index b5a4eeb05ac..da4a1e31571 100644 --- a/www/trafficserver/Makefile +++ b/www/trafficserver/Makefile @@ -2,6 +2,7 @@ PORTNAME= trafficserver PORTVERSION= 9.1.2 +PORTREVISION= 1 CATEGORIES= www MASTER_SITES= APACHE/${PORTNAME} diff --git a/www/varnish4/Makefile b/www/varnish4/Makefile index a437bab70a6..1d19b913258 100644 --- a/www/varnish4/Makefile +++ b/www/varnish4/Makefile @@ -14,7 +14,7 @@ LICENSE_FILE= ${WRKSRC}/LICENSE BUILD_DEPENDS= rst2man:textproc/py-docutils 
LIB_DEPENDS= libpcre.so:devel/pcre -CONFLICTS= varnish-2.* varnish-3.* +CONFLICTS= varnish6 varnish7 USES= autoreconf cpe gmake libedit libtool ncurses pathfix \ pkgconfig python:build readline shebangfix diff --git a/www/varnish6/Makefile b/www/varnish6/Makefile index d310a079276..c4000df444a 100644 --- a/www/varnish6/Makefile +++ b/www/varnish6/Makefile @@ -18,7 +18,7 @@ LIB_DEPENDS= libpcre.so:devel/pcre USES= autoreconf compiler cpe gmake libedit libtool ncurses pathfix \ pkgconfig python:3.4+,build readline shebangfix -CONFLICTS= varnish-2.* varnish-3.* varnish4-4.* varnish5-5.* +CONFLICTS= varnish4 varnish7 SHEBANG_FILES= lib/libvcc/*.py CPE_VENDOR= varnish-cache diff --git a/www/varnish7/Makefile b/www/varnish7/Makefile new file mode 100644 index 00000000000..84b65dedfa9 --- /dev/null +++ b/www/varnish7/Makefile @@ -0,0 +1,57 @@ +PORTNAME= varnish +DISTVERSION= 7.1.0 +DISTVERSIONPREFIX= varnish- +CATEGORIES= www +PKGNAMESUFFIX= 7 + +MAINTAINER= dbaio@FreeBSD.org +COMMENT= High-performance HTTP accelerator + +LICENSE= BSD2CLAUSE +LICENSE_FILE= ${WRKSRC}/LICENSE + +BUILD_DEPENDS= rst2man:textproc/py-docutils@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}sphinx>=0,1:textproc/py-sphinx@${PY_FLAVOR} +LIB_DEPENDS= libpcre2-8.so:devel/pcre2 + +USES= autoreconf compiler cpe gmake libedit libtool ncurses pathfix \ + pkgconfig python:3.6+,build readline shebangfix + +CONFLICTS= varnish4 varnish6 + +SHEBANG_FILES= lib/libvcc/*.py lib/libvsc/*.py +CPE_VENDOR= varnish-cache +CFLAGS+= -I${LOCALBASE}/include +GNU_CONFIGURE= yes +CONFIGURE_ARGS= --localstatedir=${PREFIX} --without-dot +USE_LDCONFIG= yes +INSTALL_TARGET= install-strip +TEST_TARGET= check +TEST_ARGS= TESTS_PARALLELISM=1 + +USE_GITHUB= yes +GH_ACCOUNT= varnishcache +GH_PROJECT= varnish-cache + +USERS= varnish varnishlog +GROUPS= varnish + +USE_RC_SUBR= varnishd varnishlog varnishncsa +.if defined(NO_INET6) || defined(WITHOUT_INET6) +BAD_TESTS+= r00832 +EXTRA_PATCHES+= ${FILESDIR}/no-inet6.patch +.endif + 
+OPTIONS_DEFINE= DOCS + +.include + +post-patch: +.if defined(BAD_TESTS) + ${RM} ${BAD_TESTS:C|.+|${WRKSRC}/bin/varnishtest/tests/\0.vtc|} +.endif +.if ${CHOSEN_COMPILER_TYPE} == gcc + ${REINPLACE_CMD} -e '/-Wno-unknown-warning-option -Wno-implicit-fallthrough/d' ${WRKSRC}/configure.ac +.endif + +.include diff --git a/www/varnish7/distinfo b/www/varnish7/distinfo new file mode 100644 index 00000000000..0ad0ee3a62d --- /dev/null +++ b/www/varnish7/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1648299456 +SHA256 (varnishcache-varnish-cache-varnish-7.1.0_GH0.tar.gz) = 6dc4b066a949886e917736c21ae4e9ee3f519d7479820914643b45133a793684 +SIZE (varnishcache-varnish-cache-varnish-7.1.0_GH0.tar.gz) = 1742291 diff --git a/www/varnish7/files/no-inet6.patch b/www/varnish7/files/no-inet6.patch new file mode 100644 index 00000000000..56d16e63b10 --- /dev/null +++ b/www/varnish7/files/no-inet6.patch @@ -0,0 +1,8 @@ +--- bin/varnishtest/tests/c00005.vtc 2012-08-20 05:20:39.000000000 -0400 ++++ bin/varnishtest/tests/c00005.vtc 2012-09-26 12:09:59.000000000 -0400 +@@ -32,5 +32,4 @@ + ! "localhost"; + "0.0.0.0" / 0; +- "::" / 0; + } + diff --git a/www/varnish7/files/varnishd.in b/www/varnish7/files/varnishd.in new file mode 100644 index 00000000000..d066f082f6d --- /dev/null +++ b/www/varnish7/files/varnishd.in @@ -0,0 +1,125 @@ +#!/bin/sh + +# PROVIDE: varnishd +# REQUIRE: DAEMON +# KEYWORD: shutdown + +# +# Add the following line to /etc/rc.conf to enable varnishd: +# +# varnishd_enable="YES" +# +# Configuration variables and their default values: +# +# varnishd_pidfile - full path to the PID file. +# default: "/var/run/varnishd.pid" +# +# varnishd_listen - address and port at which varnishd will listen for +# client requests. +# default: ":80" +# +# varnishd_admin - address and port at which varnishd will listen for +# administrative commands. +# default: "localhost:81" +# +# varnishd_backend - address of the backend server. 
+# default: "localhost:8080" +# +# varnishd_config - name of the varnishd config file. +# default: unset. +# +# varnishd_hash - hash algorithm +# default: "critbit" +# +# varnishd_storage - storage method and parameters. +# default: "file,/tmp,100M" +# +# varnishd_jailuser - unprivileged user for the child process. +# default: "varnish" +# +# varnishd_flags - complete command line arguments. +# default if varnishd_config is unset: "-j unix,user=${varnishd_jailuser} -P ${varnishd_pidfile} -a ${varnishd_listen} -T ${varnishd_admin} -b ${varnishd_backend} -s ${varnishd_storage} -h ${varnishd_hash} ${varnishd_extra_flags}" +# default if varnishd_config is set: "-j unix,user=${varnishd_jailuser} -P ${varnishd_pidfile} -a ${varnishd_listen} -T ${varnishd_admin} -f ${varnishd_config} -s ${varnishd_storage} -h ${varnishd_hash} ${varnishd_extra_flags}" +# +# See varnishd(1) for a detailed overview of command-line options. +# + +. /etc/rc.subr + +name=varnishd +rcvar=varnishd_enable + +load_rc_config ${name} + +: ${varnishd_enable:=NO} +: ${varnishd_pidfile=/var/run/${name}.pid} +: ${varnishd_listen=:80} +: ${varnishd_admin=localhost:81} +: ${varnishd_backend=localhost:8080} +: ${varnishd_storage=file,/tmp,100M} +: ${varnishd_hash=critbit} +: ${varnishd_jailuser=varnish} + +command="%%PREFIX%%/sbin/${name}" +pidfile="${varnishd_pidfile}" +configtest_cmd="varnishd_checkconfig" +reload_cmd="varnishd_reload" +restart_precmd="varnishd_checkconfig" +start_precmd="varnishd_precmd" +extra_commands="status reload configtest" + +if [ -n "${varnishd_config}" ] ; then + : ${varnishd_flags:="-j unix,user=${varnishd_jailuser} -P ${varnishd_pidfile} -a ${varnishd_listen} -T ${varnishd_admin} -f ${varnishd_config} -s ${varnishd_storage} -h ${varnishd_hash} ${varnishd_extra_flags}"} +else + : ${varnishd_flags:="-j unix,user=${varnishd_jailuser} -P ${varnishd_pidfile} -a ${varnishd_listen} -T ${varnishd_admin} -b ${varnishd_backend} -s ${varnishd_storage} -h ${varnishd_hash} 
${varnishd_extra_flags}"} +fi + +varnishd_checkconfig() +{ + if [ -z "${varnishd_config}" ]; then + echo "${name}: nothing to check, no configuration file defined, builtin VCL used" + else + echo "Performing sanity check on ${name} configuration:" + if eval ${command} -s ${varnishd_storage} ${varnishd_extra_flags} -C -f "${varnishd_config}" 2> /dev/null ; then + echo "${name}: the configuration file ${varnishd_config} syntax is ok" + else + err 1 "${name}: the configuration file ${varnishd_config} syntax is NOT ok" + fi + fi +} + +# Adapted from work done by Ingvar Hagelund (see redhat/varnish_reload_vcl) +varnishd_reload() +{ + local _current_config_name _new_config_name _varnishadm_cmd + + if [ -z "${varnishd_config}" ]; then + echo "${name}: nothing to reload, no configuration file defined, builtin VCL used" + else + _new_config_name="reloaded_$(date +%Y%m%d%H%M%S)" + _varnishadm_cmd="%%PREFIX%%/bin/varnishadm ${varnish_cli_flags}" + if ! eval ${_varnishadm_cmd} vcl.list > /dev/null; then + err 1 "${name}: can't connect to varnishadm" + fi + _current_config_name=$(${_varnishadm_cmd} vcl.list | awk ' /^active/ { print $3 } ') + if ! 
eval ${_varnishadm_cmd} vcl.load ${_new_config_name} ${varnishd_config} > /dev/null; then + err 1 "${name}: vcl.load failed, you're still using previous rules (${_current_config_name})" + fi + if eval ${_varnishadm_cmd} vcl.use ${_new_config_name} > /dev/null; then + echo "VCL file \"${varnishd_config}\" has been successfully loaded as \"${_new_config_name}\"" + echo "To remove previous loaded configurations, you should run \"${_varnishadm_cmd} vcl.discard \" by yourself" + else + err 1 "${name}: vcl.use failed, you're still using previous rules (${_current_config_name})" + fi + fi + + return 0 +} + +varnishd_precmd() +{ + # Check config before starting + varnishd_checkconfig +} + +run_rc_command "$1" diff --git a/www/varnish7/files/varnishlog.in b/www/varnish7/files/varnishlog.in new file mode 100644 index 00000000000..619ca7e9030 --- /dev/null +++ b/www/varnish7/files/varnishlog.in @@ -0,0 +1,62 @@ +#!/bin/sh + +# PROVIDE: varnishlog +# REQUIRE: DAEMON varnishd +# KEYWORD: shutdown + +# +# Add the following line to /etc/rc.conf to enable varnishlog: +# +# varnishlog_enable="YES" +# +# Configuration variables and their default values: +# +# varnishlog_pidfile - full path to the PID file. +# default: "/var/run/varnishlog.pid" +# +# varnishlog_file - full path to the log file. +# default: "/var/log/varnish.log" +# +# varnishlog_flags - command line arguments. +# default: "-t off -P ${varnishlog_pidfile} -D -a -A -w ${varnishlog_file}" +# +# Add the following line to /etc/newsyslog.conf to rotate the log file +# once a day: +# +# /var/log/varnish.log varnishlog:varnish 640 7 * @T00 JB /var/run/varnishlog.pid +# +# See varnishlog(1) for a detailed overview of command-line options. +# + +. 
/etc/rc.subr + +name=varnishlog +rcvar=varnishlog_enable + +load_rc_config ${name} +: ${varnishlog_enable:=NO} +: ${varnishlog_pidfile=/var/run/${name}.pid} +: ${varnishlog_file=/var/log/varnish.log} +: ${varnishlog_flags="-t off -P ${varnishlog_pidfile} -D -a -A -w ${varnishlog_file}"} + +procname="%%PREFIX%%/bin/${name}" +command="/usr/sbin/daemon" +command_args="-f -u varnishlog ${procname} ${varnishlog_flags}" +pidfile=${varnishlog_pidfile} +start_precmd=precmd + +precmd() +{ + # varnishlog_flags gets applied too early if we don't do this. + rc_flags="" + + if [ ! -e ${pidfile} ]; then + install -o varnishlog -g varnish -m 644 /dev/null ${pidfile}; + fi + + if [ ! -e ${varnishlog_file} ]; then + install -o varnishlog -g varnish -m 640 /dev/null ${varnishlog_file}; + fi +} + +run_rc_command "$1" diff --git a/www/varnish7/files/varnishncsa.in b/www/varnish7/files/varnishncsa.in new file mode 100644 index 00000000000..43560ab48b1 --- /dev/null +++ b/www/varnish7/files/varnishncsa.in @@ -0,0 +1,77 @@ +#!/bin/sh + +# PROVIDE: varnishncsa +# REQUIRE: DAEMON varnishd +# KEYWORD: shutdown + +# +# Add the following line to /etc/rc.conf to enable varnishncsa: +# +# varnishncsa_enable="YES" +# +# Configuration variables and their default values: +# +# varnishncsa_pidfile - full path to the PID file. +# default: "/var/run/varnishncsa.pid" +# +# varnishncsa_file - full path to the log file. +# default: "/var/log/varnishncsa.log" +# +# varnishncsa_log_method - log to file or syslog +# default: "-D -a -w ${varnishncsa_file}" +# varnishncsa_syslog="YES": '| /usr/bin/logger -t varnish -p daemon.info &' +# +# varnishncsa_flags - command line arguments. +# default: "-t off -P ${varnishncsa_pidfile} ${varnishncsa_logformat:+ -F \"$varnishncsa_logformat\"} ${varnishncsa_log_method}" +# +# varnishncsa_logformat - log file format. 
+# default: "" (uses varnishncsa's default format) +# example: "%h %l %u %t %r %s %b %{Referer}i %{User-agent}i" +# +# Add the following line to /etc/newsyslog.conf to rotate the log file +# once a day: +# +# /var/log/varnishncsa.log varnishlog:varnish 640 7 * @T00 JB /var/run/varnishncsa.pid +# +# See varnishncsa(1) for a detailed overview of command-line options. +# + +. /etc/rc.subr + +name=varnishncsa +rcvar=varnishncsa_enable + +load_rc_config ${name} + +: ${varnishncsa_enable:=NO} +: ${varnishncsa_pidfile=/var/run/${name}.pid} +: ${varnishncsa_file=/var/log/${name}.log} +: ${varnishncsa_flags="-t off -P ${varnishncsa_pidfile} ${varnishncsa_logformat:+-F \"$varnishncsa_logformat\"}"} +: ${varnishncsa_syslog:=NO} + +if checkyesno varnishncsa_syslog; then + varnishncsa_log_method='| /usr/bin/logger -t varnish -p daemon.info &' +else + varnishncsa_log_method="-D -a -w ${varnishncsa_file}" +fi + +command="%%PREFIX%%/bin/${name}" +command_args="${varnishncsa_flags} ${varnishncsa_log_method}" +pidfile=${varnishncsa_pidfile} +start_precmd=precmd + +precmd() +{ + # $varnishncsa_flags gets applied too early if we don't do this. + rc_flags="" + + if [ ! -e ${pidfile} ]; then + install -o varnishlog -g varnish -m 644 /dev/null ${pidfile}; + fi + + if [ ! -e ${varnishncsa_file} ]; then + install -o varnishlog -g varnish -m 640 /dev/null ${varnishncsa_file}; + fi +} + +run_rc_command "$1" diff --git a/www/varnish7/pkg-descr b/www/varnish7/pkg-descr new file mode 100644 index 00000000000..5063feafee3 --- /dev/null +++ b/www/varnish7/pkg-descr @@ -0,0 +1,12 @@ +This is the Varnish high-performance HTTP accelerator. 
+ +Documentation and additional information about Varnish is available on +http://varnish-cache.org/docs/index.html + +Technical questions about Varnish and this release should be addressed +to + +Questions about commercial support and services related to Varnish +can be directed here: https://www.varnish-software.com/contact-us/ + +WWW: https://varnish-cache.org/ diff --git a/www/varnish7/pkg-message b/www/varnish7/pkg-message new file mode 100644 index 00000000000..f5b1f2bffc6 --- /dev/null +++ b/www/varnish7/pkg-message @@ -0,0 +1,18 @@ +[ +{ type: install + message: < + +PORTNAME= polo +DISTVERSION= 18.8-beta +PORTREVISION= 1 +DISTVERSIONPREFIX= v +CATEGORIES= x11-fm + +MAINTAINER= gnome@FreeBSD.org +COMMENT= Advanced graphical file manager written in Vala + +LICENSE= GPLv2+ + +BUILD_DEPENDS= valac:lang/vala +LIB_DEPENDS= libgee-0.8.so:devel/libgee \ + libharfbuzz.so:print/harfbuzz \ + libjson-glib-1.0.so:devel/json-glib + +USES= desktop-file-utils gettext-tools gmake gnome pkgconfig +USE_GNOME= cairo gdkpixbuf2 gtk30 libxml2 vte3 +USE_GITHUB= yes +GH_ACCOUNT= teejee2008 + +WRKSRC_SUBDIR= src +MAKEFILE= makefile + +post-patch: + @${RM} ${WRKSRC}/share/polo/files/gtk-theme/*.orig + @${RM} ${WRKSRC}/share/polo/files/install-*.sh + @${RM} -r ${WRKSRC}/share/polo/files/udisks2 + +.include diff --git a/x11-fm/polo/distinfo b/x11-fm/polo/distinfo new file mode 100644 index 00000000000..be251c5d44c --- /dev/null +++ b/x11-fm/polo/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1534048918 +SHA256 (teejee2008-polo-v18.8-beta_GH0.tar.gz) = 1112469c2107ac1ed08d1f2f966d7ee21baff773ff56dc56c137eaef2468515f +SIZE (teejee2008-polo-v18.8-beta_GH0.tar.gz) = 2040803 diff --git a/x11-fm/polo/files/patch-Gtk_ProgressPanel.vala b/x11-fm/polo/files/patch-Gtk_ProgressPanel.vala new file mode 100644 index 00000000000..7a7dde9ec94 --- /dev/null +++ b/x11-fm/polo/files/patch-Gtk_ProgressPanel.vala @@ -0,0 +1,11 @@ +--- Gtk/ProgressPanel.vala.orig 2018-08-12 04:41:58 UTC ++++ 
Gtk/ProgressPanel.vala +@@ -65,7 +65,7 @@ public abstract class ProgressPanel : Gtk.Box { + + public signal void task_complete(); + +- public ProgressPanel(FileViewPane _pane, Gee.ArrayList? _items, FileActionType _action){ ++ protected ProgressPanel(FileViewPane _pane, Gee.ArrayList? _items, FileActionType _action){ + //base(Gtk.Orientation.VERTICAL, 6); // issue with vala + Object(orientation: Gtk.Orientation.VERTICAL, spacing: 6); // work-around + margin = 6; diff --git a/x11-fm/polo/files/patch-Gtk_TermBox.vala b/x11-fm/polo/files/patch-Gtk_TermBox.vala new file mode 100644 index 00000000000..225e5d55de5 --- /dev/null +++ b/x11-fm/polo/files/patch-Gtk_TermBox.vala @@ -0,0 +1,11 @@ +--- Gtk/TermBox.vala.orig 2018-08-12 04:41:58 UTC ++++ Gtk/TermBox.vala +@@ -255,7 +255,7 @@ public class TermBox : Gtk.Box { + + #else + +- term.feed_child(cmd, -1); ++ term.feed_child((uint8[])cmd); + + #endif + } diff --git a/x11-fm/polo/files/patch-Utility_AsyncTask.vala b/x11-fm/polo/files/patch-Utility_AsyncTask.vala new file mode 100644 index 00000000000..fe63b1e6980 --- /dev/null +++ b/x11-fm/polo/files/patch-Utility_AsyncTask.vala @@ -0,0 +1,11 @@ +--- Utility/AsyncTask.vala.orig 2018-08-12 04:41:58 UTC ++++ Utility/AsyncTask.vala +@@ -87,7 +87,7 @@ public abstract class AsyncTask : GLib.Object{ + public signal void stderr_line_read(string line); + public signal void task_complete(); + +- public AsyncTask(){ ++ protected AsyncTask(){ + init_temp_directories(); + } + diff --git a/x11-fm/polo/files/patch-Utility_MediaFile.vala b/x11-fm/polo/files/patch-Utility_MediaFile.vala new file mode 100644 index 00000000000..99accd36be1 --- /dev/null +++ b/x11-fm/polo/files/patch-Utility_MediaFile.vala @@ -0,0 +1,11 @@ +--- Utility/MediaFile.vala.orig 2018-08-12 04:41:58 UTC ++++ Utility/MediaFile.vala +@@ -634,7 +634,7 @@ public abstract class MediaStream : GLib.Object{ + public string Description = ""; + public bool IsSelected = true; + +- public MediaStream(MediaStreamType _type){ ++ 
protected MediaStream(MediaStreamType _type){ + Type = _type; + } + diff --git a/x11-fm/polo/files/patch-Utility_SysInfo.vala b/x11-fm/polo/files/patch-Utility_SysInfo.vala new file mode 100644 index 00000000000..5e1cae7b28d --- /dev/null +++ b/x11-fm/polo/files/patch-Utility_SysInfo.vala @@ -0,0 +1,32 @@ +--- Utility/SysInfo.vala.orig 2018-08-12 04:41:58 UTC ++++ Utility/SysInfo.vala +@@ -50,7 +50,7 @@ public class SysInfo : GLib.Object { + string std_out, std_err; + exec_sync("uname -m", out std_out, out std_err); + +- if (std_out.replace("\n","").strip().down() == "x86_64"){ ++ if (std_out.replace("\n","").contains("64")){ + arch = 64; + } + else{ +@@ -61,7 +61,7 @@ public class SysInfo : GLib.Object { + public void query_cpu_cores(){ + + string std_out, std_err; +- exec_sync("grep -c ^processor /proc/cpuinfo", out std_out, out std_err); ++ exec_sync("sysctl -n hw.ncpu", out std_out, out std_err); + + cpu_cores = int.parse(std_out); + } +@@ -74,9 +74,9 @@ public class SysInfo : GLib.Object { + public void query_memory(){ + + string std_out, std_err; +- exec_script_sync("grep MemTotal /proc/meminfo | awk '{print $2}'", out std_out, out std_err); ++ exec_sync("sysctl -n hw.physmem", out std_out, out std_err); + +- mem_total_mb = (int) (int.parse(std_out) / 1024.0); ++ mem_total_mb = (int) (int.parse(std_out) / 1024.0 / 1024.0); + } + + public void print(){ diff --git a/x11-fm/polo/files/patch-Utility_TeeJee.Process.vala b/x11-fm/polo/files/patch-Utility_TeeJee.Process.vala new file mode 100644 index 00000000000..8eafff7a532 --- /dev/null +++ b/x11-fm/polo/files/patch-Utility_TeeJee.Process.vala @@ -0,0 +1,29 @@ +--- Utility/TeeJee.Process.vala.orig 2018-08-12 04:41:58 UTC ++++ Utility/TeeJee.Process.vala +@@ -192,7 +192,7 @@ namespace TeeJee.ProcessHelper{ + * Returns the script file path */ + + string sh = ""; +- sh += "#!/bin/bash\n"; ++ sh += "#!/bin/sh\n"; + sh += "\n"; + if (force_locale){ + sh += "LANG=C\n"; +@@ -216,7 +216,7 @@ namespace 
TeeJee.ProcessHelper{ + if (admin_mode){ + + sh = ""; +- sh += "#!/bin/bash\n"; ++ sh += "#!/bin/sh\n"; + sh += "pkexec env DISPLAY=$DISPLAY XAUTHORITY=$XAUTHORITY"; + sh += " '%s'\n".printf(escape_single_quote(sh_path)); + sh += "if [ -f status ]; then exit $(cat status); else exit 0; fi\n"; +@@ -293,7 +293,7 @@ namespace TeeJee.ProcessHelper{ + /* Get the process ID for a process with given name */ + + string std_out, std_err; +- exec_sync("pidof \"%s\"".printf(name), out std_out, out std_err); ++ exec_sync("pgrep -d'' \"%s\"".printf(name), out std_out, out std_err); + + if (std_out != null){ + string[] arr = std_out.split ("\n"); diff --git a/x11-fm/polo/files/patch-makefile b/x11-fm/polo/files/patch-makefile new file mode 100644 index 00000000000..041a1842d28 --- /dev/null +++ b/x11-fm/polo/files/patch-makefile @@ -0,0 +1,32 @@ +--- makefile.orig 2018-08-12 04:41:58 UTC ++++ makefile +@@ -1,7 +1,7 @@ +-SHELL=/bin/bash ++SHELL=/bin/sh + CFLAGS=--std=c99 + +-prefix=/usr ++prefix=$(PREFIX) + bindir=$(prefix)/bin + sharedir=$(prefix)/share + localedir=$(sharedir)/locale +@@ -101,17 +101,15 @@ install: + mkdir -p "$(DESTDIR)$(sharedir)/${app_name}" + mkdir -p "$(DESTDIR)$(sharedir)/pixmaps" + mkdir -p "$(DESTDIR)$(sharedir)/appdata" +- mkdir -p "$(DESTDIR)/var/log/polo" + + #binary + install -m 0755 ${app_name}-gtk "$(DESTDIR)$(bindir)" + install -m 0755 ${app_name}-gtk3-helper "$(DESTDIR)$(bindir)" + install -m 0755 ${app_name}-disk "$(DESTDIR)$(bindir)" +- install -m 0755 ${app_name}-uninstall "$(DESTDIR)$(bindir)" + + #shared files +- cp -dpr --no-preserve=ownership -t "$(DESTDIR)$(sharedir)/${app_name}" ./share/${app_name}/* +- chmod --recursive 0755 $(DESTDIR)$(sharedir)/${app_name}/* ++ cp -Ppr share/${app_name}/* "$(DESTDIR)$(sharedir)/${app_name}" ++ chmod -R 0755 $(DESTDIR)$(sharedir)/${app_name}/* + chmod a+x $(DESTDIR)$(sharedir)/${app_name}/files/gtk-theme/install-gtk-theme + + #launcher diff --git 
a/x11-fm/polo/files/patch-share_polo_files_gtk-theme_install-gtk-theme b/x11-fm/polo/files/patch-share_polo_files_gtk-theme_install-gtk-theme new file mode 100644 index 00000000000..a174735f961 --- /dev/null +++ b/x11-fm/polo/files/patch-share_polo_files_gtk-theme_install-gtk-theme @@ -0,0 +1,17 @@ +--- share/polo/files/gtk-theme/install-gtk-theme.orig 2018-08-12 04:41:58 UTC ++++ share/polo/files/gtk-theme/install-gtk-theme +@@ -1,4 +1,4 @@ +-#!/bin/bash ++#!/bin/sh + + gtk_major=$( polo-gtk3-helper --major ) + if [ $? -ne 0 ]; then exit 1; fi +@@ -11,7 +11,7 @@ DIR="$( cd "$( dirname "$0" )" && pwd )" + cd $DIR + if [ $? -ne 0 ]; then exit 1; fi + +-if (($gtk_minor == 14)) || (($gtk_minor == 16)) || (($gtk_minor == 18)) || (($gtk_minor == 20)) || (($gtk_minor == 22)); then ++if [ $gtk_minor = 14 -o $gtk_minor = 16 -o $gtk_minor = 18 -o $gtk_minor = 20 -o $gtk_minor = 22 ]; then + gtk_version="$gtk_major.$gtk_minor" + else + gtk_version="3.22" diff --git a/x11-fm/polo/pkg-descr b/x11-fm/polo/pkg-descr new file mode 100644 index 00000000000..3482c2f88cb --- /dev/null +++ b/x11-fm/polo/pkg-descr @@ -0,0 +1,7 @@ +Advanced file manager for Unix-like systems written in Vala. Supports +multiple panes (single, dual, quad) with multiple tabs in each pane. +Supports archive creation, extraction, and browsing. Support for cloud +storage; running and managing KVM images, modifying PDF documents and +image files, booting ISO files in KVM, writing ISO files to USB drives. 
+ +WWW: https://teejee2008.github.io/polo/ diff --git a/x11-fm/polo/pkg-plist b/x11-fm/polo/pkg-plist new file mode 100644 index 00000000000..3d54f82f6ca --- /dev/null +++ b/x11-fm/polo/pkg-plist @@ -0,0 +1,847 @@ +bin/polo-disk +bin/polo-gtk +bin/polo-gtk3-helper +share/appdata/polo-gtk.appdata.xml +share/applications/polo-gtk.desktop +share/locale/de/LC_MESSAGES/polo.mo +share/locale/fr/LC_MESSAGES/polo.mo +share/locale/nl/LC_MESSAGES/polo.mo +share/pixmaps/polo.png +%%DATADIR%%/files/bashrc +%%DATADIR%%/files/fish_prompt.fish +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-checked@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-dark.png 
+%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-selected.png 
+%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/checkbox-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-checked@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed-selected@2.png 
+%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/radio-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-header-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-header.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-header@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-header.png 
+%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-active@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-header-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-header.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-header@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-header.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-selected.png 
+%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch-selected@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/switch@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/thumbnail-frame.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-active-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-active.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-active@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-backdrop.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-hover.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close-hover@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-close@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-active.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-active@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-backdrop-dark.png 
+%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-hover.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-maximize@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-active.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-active@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-hover.png 
+%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/assets/titlebutton-minimize@2.png +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/gtk-dark.css +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/gtk.css +%%DATADIR%%/files/gtk-theme/3.14/gtk-3.0/thumbnail.png +%%DATADIR%%/files/gtk-theme/3.14/index.theme +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-checked@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-dark@2.png 
+%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked-selected@2.png 
+%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/checkbox-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-checked@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed.png 
+%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/radio-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-header-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-header.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-header@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-header.png 
+%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-active@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-header-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-header.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-header@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-header.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-selected.png 
+%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch-selected@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/switch@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/thumbnail-frame.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-active-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-active.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-active@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-backdrop.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-hover.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close-hover@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-close@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-active.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-active@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-backdrop-dark.png 
+%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-hover.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-maximize@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-active.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-active@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-hover.png 
+%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/assets/titlebutton-minimize@2.png +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/gtk-dark.css +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/gtk.css +%%DATADIR%%/files/gtk-theme/3.16/gtk-3.0/thumbnail.png +%%DATADIR%%/files/gtk-theme/3.16/index.theme +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-checked@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-dark@2.png 
+%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked-selected@2.png 
+%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/checkbox-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-checked@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed.png 
+%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/radio-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-header-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-header.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-header@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-header.png 
+%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-active@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-header-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-header.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-header@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-header.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-selected.png 
+%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch-selected@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/switch@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/thumbnail-frame.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-active-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-active.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-active@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-backdrop.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-hover.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close-hover@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-close@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-active.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-active@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-backdrop-dark.png 
+%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-hover.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-maximize@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-active.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-active@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-hover.png 
+%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/assets/titlebutton-minimize@2.png +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/gtk-dark.css +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/gtk.css +%%DATADIR%%/files/gtk-theme/3.18/gtk-3.0/thumbnail.png +%%DATADIR%%/files/gtk-theme/3.18/index.theme +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-checked@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-dark@2.png 
+%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-selectionmode-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-selectionmode-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-selectionmode.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-selectionmode@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked-selected@2.png 
+%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/checkbox-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-checked@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed.png 
+%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-mixed@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/radio-unchecked@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-header-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-header.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-header@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-header.png 
+%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-active@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-header-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-header.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-header@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-header-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-header-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-header.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-header@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-selected.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-insensitive@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-selected.png 
+%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch-selected@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/switch@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/thumbnail-frame.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-active-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-active.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-active@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-backdrop.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-hover.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close-hover@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-close@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-active.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-active@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-backdrop-dark.png 
+%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-hover.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-maximize@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-active-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-active-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-active.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-active@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-backdrop-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-backdrop-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-backdrop.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-backdrop@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-hover-dark.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-hover-dark@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-hover.png 
+%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize-hover@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/assets/titlebutton-minimize@2.png +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/gtk-dark.css +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/gtk.css +%%DATADIR%%/files/gtk-theme/3.22/gtk-3.0/thumbnail.png +%%DATADIR%%/files/gtk-theme/3.22/index.theme +%%DATADIR%%/files/gtk-theme/README +%%DATADIR%%/files/gtk-theme/install-gtk-theme +%%DATADIR%%/images/application-pdf.png +%%DATADIR%%/images/application-x-cd-image.png +%%DATADIR%%/images/apply.png +%%DATADIR%%/images/audio-volume-high-symbolic.svg +%%DATADIR%%/images/audio-volume-muted-symbolic.svg +%%DATADIR%%/images/bookmark-missing.svg +%%DATADIR%%/images/clamav.svg +%%DATADIR%%/images/clamav_logo.svg +%%DATADIR%%/images/collapse-menu-symbolic.svg +%%DATADIR%%/images/compare.svg +%%DATADIR%%/images/config.svg +%%DATADIR%%/images/dialog-error.svg +%%DATADIR%%/images/dialog-information.svg +%%DATADIR%%/images/dialog-password.svg +%%DATADIR%%/images/dialog-question.svg +%%DATADIR%%/images/dialog-warning.svg +%%DATADIR%%/images/disk-usage-analyzer.png +%%DATADIR%%/images/document-new.png +%%DATADIR%%/images/document-properties.png +%%DATADIR%%/images/donate.svg +%%DATADIR%%/images/drive-harddisk-symbolic.svg +%%DATADIR%%/images/dropbox.png +%%DATADIR%%/images/edit-clear.png +%%DATADIR%%/images/edit-copy-symbolic.svg +%%DATADIR%%/images/edit-cut-symbolic.svg +%%DATADIR%%/images/edit-delete-symbolic.svg +%%DATADIR%%/images/edit-find.png +%%DATADIR%%/images/edit-paste-symbolic.svg +%%DATADIR%%/images/edit-redo.png +%%DATADIR%%/images/edit-rename.png +%%DATADIR%%/images/edit-undo.png +%%DATADIR%%/images/emblem-documents.png +%%DATADIR%%/images/emblem-music.svg +%%DATADIR%%/images/emblem-photos.png +%%DATADIR%%/images/emblem-readonly.png +%%DATADIR%%/images/emblem-symbolic-link.png +%%DATADIR%%/images/emblem-videos.svg 
+%%DATADIR%%/images/error.png +%%DATADIR%%/images/expand-menu-symbolic.svg +%%DATADIR%%/images/extract-archive.svg +%%DATADIR%%/images/folder-copy.png +%%DATADIR%%/images/folder-move.png +%%DATADIR%%/images/folder-new.png +%%DATADIR%%/images/folder-open.png +%%DATADIR%%/images/folder.png +%%DATADIR%%/images/font-x-generic.png +%%DATADIR%%/images/fs-btrfs.png +%%DATADIR%%/images/fs-exfat.png +%%DATADIR%%/images/fs-ext2.png +%%DATADIR%%/images/fs-ext3.png +%%DATADIR%%/images/fs-ext4.png +%%DATADIR%%/images/fs-f2fs.png +%%DATADIR%%/images/fs-fat16.png +%%DATADIR%%/images/fs-fat32.png +%%DATADIR%%/images/fs-hfs+.png +%%DATADIR%%/images/fs-hfs.png +%%DATADIR%%/images/fs-hfsplus.png +%%DATADIR%%/images/fs-jfs.png +%%DATADIR%%/images/fs-luks.png +%%DATADIR%%/images/fs-nilfs2.png +%%DATADIR%%/images/fs-ntfs.png +%%DATADIR%%/images/fs-reiser4.png +%%DATADIR%%/images/fs-reiserfs.png +%%DATADIR%%/images/fs-ufs.png +%%DATADIR%%/images/fs-vfat.png +%%DATADIR%%/images/fs-xfs.png +%%DATADIR%%/images/go-down.png +%%DATADIR%%/images/go-home-symbolic.svg +%%DATADIR%%/images/go-next-symbolic.svg +%%DATADIR%%/images/go-previous-symbolic.svg +%%DATADIR%%/images/go-up-symbolic.svg +%%DATADIR%%/images/goa-panel.png +%%DATADIR%%/images/hash.svg +%%DATADIR%%/images/help-about-symbolic.svg +%%DATADIR%%/images/help-info.svg +%%DATADIR%%/images/image-missing.svg +%%DATADIR%%/images/insert-link.png +%%DATADIR%%/images/item-error.png +%%DATADIR%%/images/item-gray.svg +%%DATADIR%%/images/item-green.svg +%%DATADIR%%/images/item-red.svg +%%DATADIR%%/images/item-yellow.svg +%%DATADIR%%/images/kvm.svg +%%DATADIR%%/images/list-add-symbolic.svg +%%DATADIR%%/images/list-remove-symbolic.svg +%%DATADIR%%/images/lock-symbolic.png +%%DATADIR%%/images/locked.png +%%DATADIR%%/images/locked2.png +%%DATADIR%%/images/media-cdrom.png +%%DATADIR%%/images/media-eject.svg +%%DATADIR%%/images/media-flash.png +%%DATADIR%%/images/media-playback-pause-symbolic.svg +%%DATADIR%%/images/media-playback-start-symbolic.svg 
+%%DATADIR%%/images/middlebar-show.png +%%DATADIR%%/images/open-menu.svg +%%DATADIR%%/images/package-x-generic.png +%%DATADIR%%/images/partitionmanager.png +%%DATADIR%%/images/pathbar_arrows.png +%%DATADIR%%/images/pathbar_buttons.png +%%DATADIR%%/images/pathbar_compact.png +%%DATADIR%%/images/pathbar_flat_buttons.png +%%DATADIR%%/images/polo.png +%%DATADIR%%/images/polo_layout_dual_icons.png +%%DATADIR%%/images/polo_layout_dual_list.png +%%DATADIR%%/images/polo_layout_quad.png +%%DATADIR%%/images/polo_layout_single_icons.png +%%DATADIR%%/images/polo_layout_single_list.png +%%DATADIR%%/images/preferences-color.png +%%DATADIR%%/images/preferences-desktop.png +%%DATADIR%%/images/preferences-system-symbolic.svg +%%DATADIR%%/images/process-stop.png +%%DATADIR%%/images/process-stop.svg +%%DATADIR%%/images/progress.gif +%%DATADIR%%/images/sidebar-show.png +%%DATADIR%%/images/switch.svg +%%DATADIR%%/images/symbolic-link.png +%%DATADIR%%/images/tab-close.svg +%%DATADIR%%/images/tab-new.png +%%DATADIR%%/images/terminal-symbolic.svg +%%DATADIR%%/images/text-x-generic.svg +%%DATADIR%%/images/text-x-preview.svg +%%DATADIR%%/images/unlocked.png +%%DATADIR%%/images/unlocked2.png +%%DATADIR%%/images/user-bookmarks-symbolic.svg +%%DATADIR%%/images/user-bookmarks.svg +%%DATADIR%%/images/user-trash-symbolic.svg +%%DATADIR%%/images/view-filter.svg +%%DATADIR%%/images/view-fullscreen-symbolic.svg +%%DATADIR%%/images/view-grid-symbolic.svg +%%DATADIR%%/images/view-list-compact-symbolic.png +%%DATADIR%%/images/view-list-details-symbolic.png +%%DATADIR%%/images/view-list-icons-symbolic.png +%%DATADIR%%/images/view-list-images-symbolic.svg +%%DATADIR%%/images/view-refresh-symbolic.svg +%%DATADIR%%/images/view-refresh.png +%%DATADIR%%/images/web-amazon.png +%%DATADIR%%/images/web-google.png +%%DATADIR%%/images/web-microsoft.png +%%DATADIR%%/images/window-close.svg +%%DATADIR%%/images/window-maximize.svg +%%DATADIR%%/images/window-menu.svg +%%DATADIR%%/images/window-minimize.svg diff --git 
a/x11-fm/rodent/Makefile b/x11-fm/rodent/Makefile index d56081c2bc3..676438d6612 100644 --- a/x11-fm/rodent/Makefile +++ b/x11-fm/rodent/Makefile @@ -2,7 +2,7 @@ PORTNAME= rodent PORTVERSION= 5.3.16.3 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= x11-fm MASTER_SITES= SF/xffm/${PORTVERSION} diff --git a/x11-fm/rox-filer/Makefile b/x11-fm/rox-filer/Makefile index 66a77ff773f..2d44fb36178 100644 --- a/x11-fm/rox-filer/Makefile +++ b/x11-fm/rox-filer/Makefile @@ -1,6 +1,6 @@ PORTNAME= rox-filer PORTVERSION= 2.11 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= x11-fm gnome MASTER_SITES= SF/rox/rox/${PORTVERSION} diff --git a/x11-fm/sushi/Makefile b/x11-fm/sushi/Makefile index 2bdae626597..a67a12793a5 100644 --- a/x11-fm/sushi/Makefile +++ b/x11-fm/sushi/Makefile @@ -2,6 +2,7 @@ PORTNAME= sushi PORTVERSION= 41.0 +PORTREVISION= 1 CATEGORIES= x11-fm gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/x11-fonts/font-manager/Makefile b/x11-fonts/font-manager/Makefile index 8bbffcfde9f..baba39d5590 100644 --- a/x11-fonts/font-manager/Makefile +++ b/x11-fonts/font-manager/Makefile @@ -2,6 +2,7 @@ PORTNAME= font-manager PORTVERSION= 0.8.8 +PORTREVISION= 1 CATEGORIES= x11-fonts gnome MAINTAINER= ehaupt@FreeBSD.org diff --git a/x11-fonts/py-ufoprocessor/Makefile b/x11-fonts/py-ufoprocessor/Makefile index ec1fafeee9a..4bd0e867ae1 100644 --- a/x11-fonts/py-ufoprocessor/Makefile +++ b/x11-fonts/py-ufoprocessor/Makefile @@ -2,6 +2,7 @@ PORTNAME= ufoprocessor PORTVERSION= 1.9.0 +PORTREVISION= 1 CATEGORIES= x11-fonts python MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -18,7 +19,7 @@ RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}defcon>=0.6.0:x11-fonts/py-defcon@${PY_FLAVO ${PYTHON_PKGNAMEPREFIX}fontMath>=0.4.9:x11-fonts/py-fontMath@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}fontparts>=0.8.2:print/py-fontparts@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}fonttools>=3.32.0:print/py-fonttools@${PY_FLAVOR} \ - 
${PYTHON_PKGNAMEPREFIX}mutatormath>=2.1.2:math/py-mutatormath@${PY_FLAVOR} + ${PYTHON_PKGNAMEPREFIX}MutatorMath>=2.1.2:math/py-MutatorMath@${PY_FLAVOR} USES= python:3.7+ zip USE_PYTHON= autoplist concurrent distutils diff --git a/x11-themes/adapta-backgrounds/Makefile b/x11-themes/adapta-backgrounds/Makefile index 59c09fcafbc..b0fc724db1c 100644 --- a/x11-themes/adapta-backgrounds/Makefile +++ b/x11-themes/adapta-backgrounds/Makefile @@ -2,6 +2,7 @@ PORTNAME= adapta-backgrounds PORTVERSION= 0.5.2.3 +PORTREVISION= 1 CATEGORIES= x11-themes MAINTAINER= nivit@FreeBSD.org diff --git a/x11-themes/adapta-gtk-theme/Makefile b/x11-themes/adapta-gtk-theme/Makefile index 1cb4b1bc4fa..ad32ed38a23 100644 --- a/x11-themes/adapta-gtk-theme/Makefile +++ b/x11-themes/adapta-gtk-theme/Makefile @@ -2,7 +2,7 @@ PORTNAME= adapta-gtk-theme PORTVERSION= 3.95.0.11 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11-themes MAINTAINER= nivit@FreeBSD.org diff --git a/x11-themes/plata-theme/Makefile b/x11-themes/plata-theme/Makefile index 24a580802c5..6854002172d 100644 --- a/x11-themes/plata-theme/Makefile +++ b/x11-themes/plata-theme/Makefile @@ -1,5 +1,6 @@ PORTNAME= plata-theme PORTVERSION= 0.9.9 +PORTREVISION= 1 CATEGORIES= x11-themes MAINTAINER= tagattie@FreeBSD.org diff --git a/x11-toolkits/copperspice/Makefile b/x11-toolkits/copperspice/Makefile index a475ef38b1a..a1837dd0796 100644 --- a/x11-toolkits/copperspice/Makefile +++ b/x11-toolkits/copperspice/Makefile @@ -1,6 +1,6 @@ PORTNAME= copperspice DISTVERSION= 1.7.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11-toolkits MASTER_SITES= https://download.copperspice.com/${PORTNAME}/source/ diff --git a/x11-toolkits/gdl/Makefile b/x11-toolkits/gdl/Makefile index 6a110d7da48..c7a953fe4de 100644 --- a/x11-toolkits/gdl/Makefile +++ b/x11-toolkits/gdl/Makefile @@ -2,6 +2,7 @@ PORTNAME= gdl PORTVERSION= 3.34.0 +PORTREVISION= 1 CATEGORIES= x11-toolkits gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome3 diff --git a/x11-toolkits/gnocl/Makefile 
b/x11-toolkits/gnocl/Makefile index e5ad742cd85..bd6a259c452 100644 --- a/x11-toolkits/gnocl/Makefile +++ b/x11-toolkits/gnocl/Makefile @@ -2,7 +2,7 @@ PORTNAME= gnocl DISTVERSION= ${BASE_VERSION}-${NIGHTLY_BUILD} -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11-toolkits devel MASTER_SITES= SF/${PORTNAME}/Gnocl-Nightly-Build/ diff --git a/x11-toolkits/gstreamer1-plugins-gtk/Makefile b/x11-toolkits/gstreamer1-plugins-gtk/Makefile index 2a05109167f..34ccfe337de 100644 --- a/x11-toolkits/gstreamer1-plugins-gtk/Makefile +++ b/x11-toolkits/gstreamer1-plugins-gtk/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= x11-toolkits GST_PLUGIN_SUFFIX= -gtk diff --git a/x11-toolkits/gstreamer1-plugins-pango/Makefile b/x11-toolkits/gstreamer1-plugins-pango/Makefile index 53b4896ad3b..55d4efa1aec 100644 --- a/x11-toolkits/gstreamer1-plugins-pango/Makefile +++ b/x11-toolkits/gstreamer1-plugins-pango/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= x11-toolkits COMMENT= GStreamer pango textoverlay plugin diff --git a/x11-toolkits/gtk-sharp20/Makefile b/x11-toolkits/gtk-sharp20/Makefile index cb6a47b0d54..1f3e226369e 100644 --- a/x11-toolkits/gtk-sharp20/Makefile +++ b/x11-toolkits/gtk-sharp20/Makefile @@ -2,7 +2,7 @@ PORTNAME= gtk-sharp PORTVERSION= 2.12.45 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11-toolkits MASTER_SITES= http://download.mono-project.com/sources/gtk-sharp212/ PKGNAMESUFFIX= 20 diff --git a/x11-toolkits/gtkmathview/Makefile b/x11-toolkits/gtkmathview/Makefile index def9fa564fe..38d4554f715 100644 --- a/x11-toolkits/gtkmathview/Makefile +++ b/x11-toolkits/gtkmathview/Makefile @@ -2,7 +2,7 @@ PORTNAME= gtkmathview PORTVERSION= 0.8.0 -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= x11-toolkits MASTER_SITES= http://helm.cs.unibo.it/mml-widget/sources/ diff --git a/x11-toolkits/gtkmm24/Makefile b/x11-toolkits/gtkmm24/Makefile index 2c2cdf30ca1..985390c95e0 100644 --- a/x11-toolkits/gtkmm24/Makefile +++ 
b/x11-toolkits/gtkmm24/Makefile @@ -2,7 +2,7 @@ PORTNAME= gtkmm PORTVERSION= 2.24.5 -PORTREVISION?= 1 +PORTREVISION?= 2 CATEGORIES= x11-toolkits MASTER_SITES= GNOME PKGNAMESUFFIX= 24 diff --git a/x11-toolkits/gtkmm30/Makefile b/x11-toolkits/gtkmm30/Makefile index b7c3d9db3c8..1d7d284c5bc 100644 --- a/x11-toolkits/gtkmm30/Makefile +++ b/x11-toolkits/gtkmm30/Makefile @@ -2,7 +2,7 @@ PORTNAME= gtkmm PORTVERSION= 3.24.2 -PORTREVISION?= 0 +PORTREVISION?= 1 CATEGORIES= x11-toolkits MASTER_SITES= GNOME PKGNAMESUFFIX= 30 diff --git a/x11-toolkits/gtksourceview2/Makefile b/x11-toolkits/gtksourceview2/Makefile index 388a838ca58..edb8f260a13 100644 --- a/x11-toolkits/gtksourceview2/Makefile +++ b/x11-toolkits/gtksourceview2/Makefile @@ -3,7 +3,7 @@ PORTNAME= gtksourceview2 PORTVERSION= 2.10.5 -PORTREVISION?= 5 +PORTREVISION?= 6 CATEGORIES= x11-toolkits gnome MASTER_SITES= GNOME/sources/${PORTNAME:S/2$//}/${PORTVERSION:C/^([0-9]+\.[0-9]+).*/\1/} DISTNAME= ${PORTNAME:S/2$//}-${PORTVERSION} diff --git a/x11-toolkits/gtksourceview3/Makefile b/x11-toolkits/gtksourceview3/Makefile index 2c5c3ddbf3d..e926680f594 100644 --- a/x11-toolkits/gtksourceview3/Makefile +++ b/x11-toolkits/gtksourceview3/Makefile @@ -2,6 +2,7 @@ PORTNAME= gtksourceview PORTVERSION= 3.24.11 +PORTREVISION= 1 CATEGORIES= x11-toolkits gnome MASTER_SITES= GNOME PKGNAMESUFFIX= 3 diff --git a/x11-toolkits/gtksourceview4/Makefile b/x11-toolkits/gtksourceview4/Makefile index bc87dce1219..a2c2ba2ef73 100644 --- a/x11-toolkits/gtksourceview4/Makefile +++ b/x11-toolkits/gtksourceview4/Makefile @@ -2,6 +2,7 @@ PORTNAME= gtksourceview PORTVERSION= 4.8.1 +PORTREVISION= 1 CATEGORIES= x11-toolkits gnome MASTER_SITES= GNOME PKGNAMESUFFIX= 4 diff --git a/x11-toolkits/gtksourceview5/Makefile b/x11-toolkits/gtksourceview5/Makefile index 822b902de5a..1637fd3b5ee 100644 --- a/x11-toolkits/gtksourceview5/Makefile +++ b/x11-toolkits/gtksourceview5/Makefile @@ -2,6 +2,7 @@ PORTNAME= gtksourceview PORTVERSION= 5.4.0 +PORTREVISION= 1 
CATEGORIES= x11-toolkits gnome MASTER_SITES= GNOME PKGNAMESUFFIX= 5 diff --git a/x11-toolkits/gtksourceviewmm3/Makefile b/x11-toolkits/gtksourceviewmm3/Makefile index c5a829ae836..00ffeb39bb2 100644 --- a/x11-toolkits/gtksourceviewmm3/Makefile +++ b/x11-toolkits/gtksourceviewmm3/Makefile @@ -3,6 +3,7 @@ PORTNAME= gtksourceviewmm PORTVERSION= 3.18.0 +PORTREVISION= 1 CATEGORIES= x11-toolkits gnome MASTER_SITES= GNOME PKGNAMESUFFIX= 3 diff --git a/x11-toolkits/guile-gnome-platform/Makefile b/x11-toolkits/guile-gnome-platform/Makefile index 85ac0942eea..4f735be16d3 100644 --- a/x11-toolkits/guile-gnome-platform/Makefile +++ b/x11-toolkits/guile-gnome-platform/Makefile @@ -1,6 +1,6 @@ PORTNAME= guile-gnome-platform DISTVERSION= 2.16.5 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= x11-toolkits MASTER_SITES= GNU/guile-gnome/${PORTNAME} diff --git a/x11-toolkits/libadwaita/Makefile b/x11-toolkits/libadwaita/Makefile index bbd24909be9..7d2bf4ccf78 100644 --- a/x11-toolkits/libadwaita/Makefile +++ b/x11-toolkits/libadwaita/Makefile @@ -2,7 +2,6 @@ PORTNAME= libadwaita PORTVERSION= 1.1.0 CATEGORIES= x11-toolkits -# WARNING: ABI is unstable, often breaks Rust-based consumers e.g., solanum, authenticator. 
MAINTAINER= gnome@FreeBSD.org COMMENT= Building blocks for modern GNOME applications diff --git a/x11-toolkits/libsexy/Makefile b/x11-toolkits/libsexy/Makefile index 699e049a72a..8c5e32c77ad 100644 --- a/x11-toolkits/libsexy/Makefile +++ b/x11-toolkits/libsexy/Makefile @@ -2,7 +2,7 @@ PORTNAME= libsexy PORTVERSION= 0.1.11 -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= x11-toolkits devel MASTER_SITES= http://releases.chipx86.com/libsexy/libsexy/ diff --git a/x11-toolkits/ocaml-lablgtk2/Makefile b/x11-toolkits/ocaml-lablgtk2/Makefile index fc389856ec6..f6f4c9114a1 100644 --- a/x11-toolkits/ocaml-lablgtk2/Makefile +++ b/x11-toolkits/ocaml-lablgtk2/Makefile @@ -2,7 +2,7 @@ PORTNAME= lablgtk2 PORTVERSION= 2.18.11 -PORTREVISION= 4 +PORTREVISION= 5 CATEGORIES= x11-toolkits PKGNAMEPREFIX= ocaml- diff --git a/x11-toolkits/p5-Glade2/Makefile b/x11-toolkits/p5-Glade2/Makefile index 508ddb4ed0d..a8758ac3a31 100644 --- a/x11-toolkits/p5-Glade2/Makefile +++ b/x11-toolkits/p5-Glade2/Makefile @@ -2,7 +2,7 @@ PORTNAME= Glade PORTVERSION= 1.007 -PORTREVISION= 5 +PORTREVISION= 6 CATEGORIES= x11-toolkits gnome perl5 MASTER_SITES= CPAN MASTER_SITE_SUBDIR= CPAN:TSCH diff --git a/x11-toolkits/p5-Gtk2-GladeXML/Makefile b/x11-toolkits/p5-Gtk2-GladeXML/Makefile index c778599ba50..4549ffdc0c7 100644 --- a/x11-toolkits/p5-Gtk2-GladeXML/Makefile +++ b/x11-toolkits/p5-Gtk2-GladeXML/Makefile @@ -2,6 +2,7 @@ PORTNAME= Gtk2-GladeXML PORTVERSION= 1.008 +PORTREVISION= 1 CATEGORIES= x11-toolkits perl5 MASTER_SITES= CPAN PKGNAMEPREFIX= p5- diff --git a/x11-toolkits/pangomm/Makefile b/x11-toolkits/pangomm/Makefile index 6fe720caf01..a3d2d3c4ecc 100644 --- a/x11-toolkits/pangomm/Makefile +++ b/x11-toolkits/pangomm/Makefile @@ -2,7 +2,7 @@ PORTNAME= pangomm PORTVERSION= 2.40.1 -PORTREVISION?= 4 +PORTREVISION?= 5 CATEGORIES= x11-toolkits MASTER_SITES= GNOME DIST_SUBDIR= gnome2 diff --git a/x11-toolkits/py-qt5-chart/Makefile b/x11-toolkits/py-qt5-chart/Makefile index 062cee667fd..5b25456686b 100644 --- 
a/x11-toolkits/py-qt5-chart/Makefile +++ b/x11-toolkits/py-qt5-chart/Makefile @@ -1,5 +1,6 @@ PORTNAME= chart PORTVERSION= ${PYQTCHART_VERSION} +PORTREVISION= 1 CATEGORIES= x11-toolkits devel python MASTER_SITES= ${MASTER_SITES_PYQTCHART} PKGNAMEPREFIX= ${PYQT_PY_RELNAME}- diff --git a/x11-toolkits/rubygem-gtksourceview3/Makefile b/x11-toolkits/rubygem-gtksourceview3/Makefile index afdc6b8811d..b83ab414071 100644 --- a/x11-toolkits/rubygem-gtksourceview3/Makefile +++ b/x11-toolkits/rubygem-gtksourceview3/Makefile @@ -1,5 +1,6 @@ PORTNAME= gtksourceview3 PORTVERSION= 3.5.1 +PORTREVISION= 1 CATEGORIES= x11-toolkits rubygems MASTER_SITES= RG diff --git a/x11-toolkits/rubygem-gtksourceview4/Makefile b/x11-toolkits/rubygem-gtksourceview4/Makefile index cea448cfa71..f87b7882372 100644 --- a/x11-toolkits/rubygem-gtksourceview4/Makefile +++ b/x11-toolkits/rubygem-gtksourceview4/Makefile @@ -1,5 +1,6 @@ PORTNAME= gtksourceview4 PORTVERSION= 3.5.1 +PORTREVISION= 1 CATEGORIES= x11-toolkits rubygems MASTER_SITES= RG diff --git a/x11-toolkits/tepl/Makefile b/x11-toolkits/tepl/Makefile index 533655b87f9..4e6e93efd39 100644 --- a/x11-toolkits/tepl/Makefile +++ b/x11-toolkits/tepl/Makefile @@ -1,6 +1,6 @@ PORTNAME= tepl DISTVERSION= 5.1.1 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= x11-toolkits MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/x11-toolkits/tepl6/Makefile b/x11-toolkits/tepl6/Makefile index 081878aab58..5db5da1989a 100644 --- a/x11-toolkits/tepl6/Makefile +++ b/x11-toolkits/tepl6/Makefile @@ -1,6 +1,6 @@ PORTNAME= tepl DISTVERSION= 6.00.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11-toolkits MASTER_SITES= GNOME PKGNAMESUFFIX= 6 diff --git a/x11-wm/compiz-plugins-extra/Makefile b/x11-wm/compiz-plugins-extra/Makefile index 1a1502c27c3..8d0a90bc785 100644 --- a/x11-wm/compiz-plugins-extra/Makefile +++ b/x11-wm/compiz-plugins-extra/Makefile @@ -2,7 +2,7 @@ PORTNAME= compiz-plugins-extra PORTVERSION= 0.8.8 -PORTREVISION= 9 +PORTREVISION= 10 CATEGORIES= x11-wm 
MASTER_SITES= http://releases.compiz.org/${PORTVERSION}/ diff --git a/x11-wm/compiz-plugins-main/Makefile b/x11-wm/compiz-plugins-main/Makefile index f8bf8bb3413..ac6a5830e65 100644 --- a/x11-wm/compiz-plugins-main/Makefile +++ b/x11-wm/compiz-plugins-main/Makefile @@ -2,7 +2,7 @@ PORTNAME= compiz-plugins-main PORTVERSION= 0.8.8 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= x11-wm MASTER_SITES= https://BSDforge.com/projects/source/x11-wm/compiz-plugins-main/ diff --git a/x11-wm/compiz-plugins-unsupported/Makefile b/x11-wm/compiz-plugins-unsupported/Makefile index a19accb40fa..16c81f27727 100644 --- a/x11-wm/compiz-plugins-unsupported/Makefile +++ b/x11-wm/compiz-plugins-unsupported/Makefile @@ -2,7 +2,7 @@ PORTNAME= compiz-plugins-unsupported PORTVERSION= 0.8.8 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= x11-wm MASTER_SITES= http://releases.compiz.org/${PORTVERSION}/ \ https://BSDforge.com/prolects/source/x11-wm/compiz-plugins-unsupported/ diff --git a/x11-wm/compiz/Makefile b/x11-wm/compiz/Makefile index e1316e68e98..d78beeae20f 100644 --- a/x11-wm/compiz/Makefile +++ b/x11-wm/compiz/Makefile @@ -1,6 +1,6 @@ PORTNAME= compiz PORTVERSION= 0.8.8 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= x11-wm MASTER_SITES= https://BSDforge.com/projects/source/x11-wm/compiz/ diff --git a/x11-wm/compizconfig-backend-gconf/Makefile b/x11-wm/compizconfig-backend-gconf/Makefile index 8644d29372a..e88661f8aa0 100644 --- a/x11-wm/compizconfig-backend-gconf/Makefile +++ b/x11-wm/compizconfig-backend-gconf/Makefile @@ -2,7 +2,7 @@ PORTNAME= compizconfig-backend-gconf PORTVERSION= 0.8.8 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= x11-wm MASTER_SITES= https://BSDforge.com/projects/source/x11-wm/compizconfig-backend-gconf/ diff --git a/x11-wm/labwc/Makefile b/x11-wm/labwc/Makefile index 4a3aa55d3c3..b25a69a2c9b 100644 --- a/x11-wm/labwc/Makefile +++ b/x11-wm/labwc/Makefile @@ -1,5 +1,6 @@ PORTNAME= labwc DISTVERSION= 0.5.0 +PORTREVISION= 1 CATEGORIES= x11-wm MAINTAINER= 
jbeich@FreeBSD.org diff --git a/x11-wm/libcompizconfig/Makefile b/x11-wm/libcompizconfig/Makefile index 1fecc957e5d..f9d0eb80f24 100644 --- a/x11-wm/libcompizconfig/Makefile +++ b/x11-wm/libcompizconfig/Makefile @@ -2,6 +2,7 @@ PORTNAME= libcompizconfig PORTVERSION= 0.8.8 +PORTREVISION= 1 CATEGORIES= x11-wm MASTER_SITES= https://BSDforge.com/projects/source/x11-wm/libcompizconfig/ diff --git a/x11-wm/lxappearance-obconf/Makefile b/x11-wm/lxappearance-obconf/Makefile index f3d49950dd7..7f775cc2c30 100644 --- a/x11-wm/lxappearance-obconf/Makefile +++ b/x11-wm/lxappearance-obconf/Makefile @@ -2,7 +2,7 @@ PORTNAME= lxappearance-obconf PORTVERSION= 0.2.3 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11-wm MASTER_SITES= SF/lxde/LXAppearance%20Obconf/ diff --git a/x11-wm/lxsession/Makefile b/x11-wm/lxsession/Makefile index ff851a45727..c9260cf6353 100644 --- a/x11-wm/lxsession/Makefile +++ b/x11-wm/lxsession/Makefile @@ -2,6 +2,7 @@ PORTNAME= lxsession PORTVERSION= 0.5.4 +PORTREVISION= 1 CATEGORIES= x11-wm MASTER_SITES= SF/lxde/LXSession%20%28session%20manager%29/LXSession%200.5.x/ diff --git a/x11-wm/obconf-qt/Makefile b/x11-wm/obconf-qt/Makefile index 6bbb3a7d2eb..18bbe2bed9e 100644 --- a/x11-wm/obconf-qt/Makefile +++ b/x11-wm/obconf-qt/Makefile @@ -2,6 +2,7 @@ PORTNAME= obconf-qt PORTVERSION= 0.16.1 +PORTREVISION= 1 CATEGORIES= x11-wm MASTER_SITES= LXQT/${PORTNAME} diff --git a/x11-wm/obconf/Makefile b/x11-wm/obconf/Makefile index 8c0c1ff0005..eaaf63c6efb 100644 --- a/x11-wm/obconf/Makefile +++ b/x11-wm/obconf/Makefile @@ -2,7 +2,7 @@ PORTNAME= obconf PORTVERSION= 2.0.4 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= x11-wm MASTER_SITES= http://openbox.org/dist/obconf/ diff --git a/x11-wm/openbox/Makefile b/x11-wm/openbox/Makefile index 10c72ceb827..245067bfa6c 100644 --- a/x11-wm/openbox/Makefile +++ b/x11-wm/openbox/Makefile @@ -2,7 +2,7 @@ PORTNAME= openbox PORTVERSION= 3.6 -PORTREVISION= 7 +PORTREVISION= 8 CATEGORIES= x11-wm MASTER_SITES= 
http://openbox.org/dist/openbox/ diff --git a/x11-wm/phoc/Makefile b/x11-wm/phoc/Makefile index 575ed263633..9946a3d07e5 100644 --- a/x11-wm/phoc/Makefile +++ b/x11-wm/phoc/Makefile @@ -1,6 +1,6 @@ PORTNAME= phoc DISTVERSIONPREFIX= v -DISTVERSION= 0.12.0 +DISTVERSION= 0.13.0 CATEGORIES= x11-wm MAINTAINER= jbeich@FreeBSD.org @@ -25,7 +25,7 @@ USE_GNOME= gnomedesktop3 USE_XORG= pixman xcb GL_SITE= https://gitlab.gnome.org GL_ACCOUNT= World/Phosh -GL_COMMIT= eb62bea7cfb3ca0a4fe417811a868e3ac95b81c5 +GL_COMMIT= cbcfdcb7de84863010827c01b0c47781adb13f74 SHEBANG_FILES= build-aux/post_install.py CFLAGS+= -Wno-error=format-nonliteral # clang PLIST_FILES= bin/${PORTNAME} diff --git a/x11-wm/phoc/Makefile.wlroots b/x11-wm/phoc/Makefile.wlroots index 1deb4da7f90..efa48cc75c4 100644 --- a/x11-wm/phoc/Makefile.wlroots +++ b/x11-wm/phoc/Makefile.wlroots @@ -10,6 +10,6 @@ LIB_DEPENDS:= ${LIB_DEPENDS:N*wlroots*} \ libxcb-icccm.so:x11/xcb-util-wm RUN_DEPENDS+= xwayland-devel>0:x11-servers/xwayland-devel USE_GL+= gbm egl -GL_TUPLE+= https://source.puri.sm:Librem5:wlroots:2fce64d30d378d7009a5770b2472231a0e535ada:wlroots/subprojects/wlroots +GL_TUPLE+= https://source.puri.sm:Librem5:wlroots:c9cd3502ba6113070020de50e41f0af72e0f5031:wlroots/subprojects/wlroots MESON_ARGS+= -Dwlroots:default_library=static PLIST_FILES+= "@comment libdata/pkgconfig/wlroots.pc" diff --git a/x11-wm/phoc/distinfo b/x11-wm/phoc/distinfo index 334bb6f6215..0be3a889f71 100644 --- a/x11-wm/phoc/distinfo +++ b/x11-wm/phoc/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1643117370 -SHA256 (World/Phosh-phoc-eb62bea7cfb3ca0a4fe417811a868e3ac95b81c5_GL0.tar.gz) = b660e1b55e41cdcea182fed9a2428bdabc5b071e289d91e00bcb0f2534e2349f -SIZE (World/Phosh-phoc-eb62bea7cfb3ca0a4fe417811a868e3ac95b81c5_GL0.tar.gz) = 164239 -SHA256 (Librem5-wlroots-2fce64d30d378d7009a5770b2472231a0e535ada_GL0.tar.gz) = 33261e862963b790c967b39ace715ba54f33c396d32cc407e3951d2a39ffd313 -SIZE (Librem5-wlroots-2fce64d30d378d7009a5770b2472231a0e535ada_GL0.tar.gz) 
= 506577 +TIMESTAMP = 1648223314 +SHA256 (World/Phosh-phoc-cbcfdcb7de84863010827c01b0c47781adb13f74_GL0.tar.gz) = d9b79d2a25c74b2eba4255997bff43dd045757d43960200e7537ac36b55adb4c +SIZE (World/Phosh-phoc-cbcfdcb7de84863010827c01b0c47781adb13f74_GL0.tar.gz) = 166533 +SHA256 (Librem5-wlroots-c9cd3502ba6113070020de50e41f0af72e0f5031_GL0.tar.gz) = e8fc48f78e744f6fe201fe8ea19aa66079d6deb9f83fbe39e679db30e444ce2b +SIZE (Librem5-wlroots-c9cd3502ba6113070020de50e41f0af72e0f5031_GL0.tar.gz) = 506612 diff --git a/x11-wm/qtile/Makefile b/x11-wm/qtile/Makefile index a26a1f1c504..ae2d24dc3e2 100644 --- a/x11-wm/qtile/Makefile +++ b/x11-wm/qtile/Makefile @@ -2,6 +2,7 @@ PORTNAME= qtile PORTVERSION= 0.18.1 +PORTREVISION= 1 CATEGORIES= x11-wm MASTER_SITES= CHEESESHOP PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} @@ -23,7 +24,6 @@ LIB_DEPENDS= libpangocairo-1.0.so:x11-toolkits/pango \ libpulse.so:audio/pulseaudio RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}xcffib>=0.5.0:x11/py-xcffib@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}cairocffi>=0.9:graphics/py-cairocffi@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}asyncio>0:devel/py-asyncio@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}dbus>=0.8:devel/py-dbus@${PY_FLAVOR} USE_GNOME= pygobject3 diff --git a/x11/alltray/Makefile b/x11/alltray/Makefile index 269718697bc..968e7e02dfd 100644 --- a/x11/alltray/Makefile +++ b/x11/alltray/Makefile @@ -2,7 +2,7 @@ PORTNAME= alltray PORTVERSION= 0.70 -PORTREVISION= 6 +PORTREVISION= 7 CATEGORIES= x11 MASTER_SITES= SF diff --git a/x11/apwal/Makefile b/x11/apwal/Makefile index 4a6928fc3d8..241bb78e35c 100644 --- a/x11/apwal/Makefile +++ b/x11/apwal/Makefile @@ -2,7 +2,7 @@ PORTNAME= apwal PORTVERSION= 0.4.5 -PORTREVISION= 12 +PORTREVISION= 13 CATEGORIES= x11 MASTER_SITES= http://apwal.free.fr/download/ diff --git a/x11/cinnamon-screensaver/Makefile b/x11/cinnamon-screensaver/Makefile index 8db0d4e03f7..28da56ed68c 100644 --- a/x11/cinnamon-screensaver/Makefile +++ b/x11/cinnamon-screensaver/Makefile @@ -2,6 +2,7 @@ PORTNAME= 
cinnamon-screensaver PORTVERSION= 4.8.1 +PORTREVISION= 1 CATEGORIES= x11 gnome DIST_SUBDIR= gnome diff --git a/x11/cinnamon/Makefile b/x11/cinnamon/Makefile index 2b6489c9fa0..a321adae931 100644 --- a/x11/cinnamon/Makefile +++ b/x11/cinnamon/Makefile @@ -2,7 +2,7 @@ PORTNAME= cinnamon PORTVERSION= 4.8.6 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11 gnome DIST_SUBDIR= gnome diff --git a/x11/gdm/Makefile b/x11/gdm/Makefile index d724d456eee..eb2116ffa0b 100644 --- a/x11/gdm/Makefile +++ b/x11/gdm/Makefile @@ -2,6 +2,7 @@ PORTNAME= gdm PORTVERSION= 42.0 +PORTREVISION= 1 CATEGORIES= x11 gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git a/x11/gdm/files/patch-data_Init.in b/x11/gdm/files/patch-data_Init.in deleted file mode 100644 index 7ec028b66b5..00000000000 --- a/x11/gdm/files/patch-data_Init.in +++ /dev/null @@ -1,66 +0,0 @@ ---- data/Init.in.orig 2022-03-21 18:12:40 UTC -+++ data/Init.in -@@ -6,6 +6,19 @@ OLD_IFS=$IFS - PATH="@X_PATH@:$PATH" - OLD_IFS=$IFS - -+# enhance fade in look; -+# gnome-shell-3.38.1/data/theme/gnome-shell-sass/widgets/_screen-shield.scss: -+# #lockDialogGroup { -+# background-color: lighten(#2e3436, 8%); -+# } -+xsetroot -solid "#2e3436" -+ -+# wait for ttys to be initialized -+while ! 
pgrep -qf "^/usr/libexec/getty "; do -+ sleep 1 -+ [ $((i++)) -ge 10 ] && break -+done -+ - gdmwhich () { - COMMAND="$1" - OUTPUT= -@@ -22,10 +35,10 @@ gdmwhich () { - echo "$OUTPUT" - } - --if [ -f /etc/X11/Xresources ]; then -- sysresources=/etc/X11/Xresources -+if [ -f /usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/Xresources ]; then -+ sysresources=/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/Xresources - else -- sysresources=/usr/etc/X11/Xresources -+ sysresources=/usr/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/Xresources - fi - - # merge in defaults -@@ -33,10 +46,10 @@ fi - xrdb -nocpp -merge "$sysresources" - fi - --if [ -f /etc/X11/Xmodmap ]; then -- sysmodmap=/etc/X11/Xmodmap -+if [ -f /usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/Xmodmap ]; then -+ sysmodmap=/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/Xmodmap - else -- sysmodmap=/usr/etc/X11/Xmodmap -+ sysmodmap=/usr/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/Xmodmap - fi - - XMODMAP=`gdmwhich xmodmap` -@@ -93,5 +106,15 @@ fi - fi - fi - fi -+ -+# /usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/xenodm/GiveConsole, fbtab(5) -+if [ -c /dev/dri/card0 ]; then -+ /usr/sbin/chown _gdm /dev/dri/card0 -+fi -+if [ -c /dev/dri/renderD128 ]; then -+ /usr/sbin/chown _gdm /dev/dri/renderD128 -+fi -+# XXX OpenBSD needs an implementation of XDG_RUNTIME_DIR -+rm -rf /var/db/gdm/.cache/gnome-shell/runtime-state-* - - exit 0 diff --git a/x11/gdm/files/patch-data_PostSession.in 
b/x11/gdm/files/patch-data_PostSession.in index d8752cc882b..bb4969327de 100644 --- a/x11/gdm/files/patch-data_PostSession.in +++ b/x11/gdm/files/patch-data_PostSession.in @@ -1,14 +1,10 @@ ---- data/PostSession.in.orig 2022-03-21 18:12:40 UTC +--- data/PostSession.in.orig 2022-03-27 19:50:55 UTC +++ data/PostSession.in -@@ -1,3 +1,11 @@ +@@ -1,3 +1,7 @@ #!/bin/sh -+# /usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/xenodm/TakeConsole, fbtab(5) -+if [ -c /dev/dri/card0 ]; then -+ /usr/sbin/chown root /dev/dri/card0 -+fi -+if [ -c /dev/dri/renderD128 ]; then -+ /usr/sbin/chown root /dev/dri/renderD128 -+fi ++# XXX: I hate this "solution", but we need to manually restart gdm here ++# otherwise we reach the console when we logout ++/usr/sbin/service gdm restart + exit 0 diff --git a/x11/gdm/files/patch-data_PreSession.in b/x11/gdm/files/patch-data_PreSession.in deleted file mode 100644 index 31abf42f57d..00000000000 --- a/x11/gdm/files/patch-data_PreSession.in +++ /dev/null @@ -1,14 +0,0 @@ ---- data/PreSession.in.orig 2022-03-21 18:12:40 UTC -+++ data/PreSession.in -@@ -7,3 +7,11 @@ PATH="@X_PATH@:$PATH" - # Note that output goes into the .xsession-errors file for easy debugging - # - PATH="@X_PATH@:$PATH" -+ -+# /usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/usr/local/etc/X11/xenodm/GiveConsole, fbtab(5) -+if [ -c /dev/dri/card0 ]; then -+ /usr/sbin/chown $USER /dev/dri/card0 -+fi -+if [ -c /dev/dri/renderD128 ]; then -+ /usr/sbin/chown $USER /dev/dri/renderD128 -+fi diff --git a/x11/gnome-shell/Makefile b/x11/gnome-shell/Makefile index 2e813a05965..8bd8301dd92 100644 --- a/x11/gnome-shell/Makefile +++ b/x11/gnome-shell/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-shell PORTVERSION= 41.4 +PORTREVISION= 2 CATEGORIES= x11 gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome diff --git 
a/x11/gnome-shell/files/patch-js_ui_environment.js b/x11/gnome-shell/files/patch-js_ui_environment.js new file mode 100644 index 00000000000..0bd7baa7a9e --- /dev/null +++ b/x11/gnome-shell/files/patch-js_ui_environment.js @@ -0,0 +1,10 @@ +--- js/ui/environment.js.orig 2022-03-27 15:10:52 UTC ++++ js/ui/environment.js +@@ -6,6 +6,7 @@ imports.gi.versions.GdkPixbuf = '2.0'; + imports.gi.versions.Clutter = Config.LIBMUTTER_API_VERSION; + imports.gi.versions.Gio = '2.0'; + imports.gi.versions.GdkPixbuf = '2.0'; ++imports.gi.versions.GnomeDesktop = '3.0'; + imports.gi.versions.Gtk = '3.0'; + imports.gi.versions.Soup = '3.0'; + imports.gi.versions.TelepathyGLib = '0.12'; diff --git a/x11/gnome-terminal/Makefile b/x11/gnome-terminal/Makefile index 8911929ad92..3c8ece423ed 100644 --- a/x11/gnome-terminal/Makefile +++ b/x11/gnome-terminal/Makefile @@ -2,6 +2,7 @@ PORTNAME= gnome-terminal DISTVERSION= 3.42.2 +PORTREVISION= 1 CATEGORIES= x11 gnome MASTER_SITES= GNOME DIST_SUBDIR= gnome diff --git a/x11/gnome/Makefile b/x11/gnome/Makefile index e15e127dab4..3896b7ba5b9 100644 --- a/x11/gnome/Makefile +++ b/x11/gnome/Makefile @@ -29,7 +29,7 @@ RUN_DEPENDS?= dconf-editor:devel/dconf-editor \ zenity>=3.0.0:x11/zenity \ seahorse>=3.0.0:security/seahorse \ gnome-control-center>=3.0.0:sysutils/gnome-control-center \ - ${LOCALBASE}/share/gnome-background-properties/gnome-backgrounds.xml:x11-themes/gnome-backgrounds \ + gnome-backgrounds>=0:x11-themes/gnome-backgrounds \ caribou>=0:accessibility/caribou \ ${LOCALBASE}/share/sounds/freedesktop/index.theme:audio/freedesktop-sound-theme diff --git a/x11/gstreamer1-plugins-x/Makefile b/x11/gstreamer1-plugins-x/Makefile index c1086462417..e385e974967 100644 --- a/x11/gstreamer1-plugins-x/Makefile +++ b/x11/gstreamer1-plugins-x/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= x11 COMMENT= GStreamer X and Xvideo output plugins diff --git a/x11/gstreamer1-plugins-ximagesrc/Makefile 
b/x11/gstreamer1-plugins-ximagesrc/Makefile index 1926fc85534..f68bb3041ec 100644 --- a/x11/gstreamer1-plugins-ximagesrc/Makefile +++ b/x11/gstreamer1-plugins-ximagesrc/Makefile @@ -1,4 +1,4 @@ -PORTREVISION= 0 +PORTREVISION= 1 CATEGORIES= x11 PKGNAMESUFFIX= 1-plugins-ximagesrc diff --git a/x11/gsynaptics/Makefile b/x11/gsynaptics/Makefile index 53fc3e2b80a..cd01b9bb9c7 100644 --- a/x11/gsynaptics/Makefile +++ b/x11/gsynaptics/Makefile @@ -2,7 +2,7 @@ PORTNAME= gsynaptics PORTVERSION= 0.9.16 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= x11 MASTER_SITES= OSDN/${PORTNAME} diff --git a/x11/jgmenu/Makefile b/x11/jgmenu/Makefile index e4305040a36..d318b7f320a 100644 --- a/x11/jgmenu/Makefile +++ b/x11/jgmenu/Makefile @@ -1,6 +1,7 @@ PORTNAME= jgmenu DISTVERSIONPREFIX= v DISTVERSION= 4.4.0 +PORTREVISION= 1 CATEGORIES= x11 MAINTAINER= lcook@FreeBSD.org diff --git a/x11/keyboardcast/Makefile b/x11/keyboardcast/Makefile index 691008b54fa..2b0029d0004 100644 --- a/x11/keyboardcast/Makefile +++ b/x11/keyboardcast/Makefile @@ -3,7 +3,7 @@ PORTNAME= keyboardcast PORTVERSION= 0.1.1 DISTVERSIONPREFIX= v -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= x11 MAINTAINER= eimar.koort@gmail.com diff --git a/x11/libxkbcommon/Makefile b/x11/libxkbcommon/Makefile index c6f7308dd2b..95855923d5c 100644 --- a/x11/libxkbcommon/Makefile +++ b/x11/libxkbcommon/Makefile @@ -1,5 +1,6 @@ PORTNAME= libxkbcommon PORTVERSION= 1.4.0 +PORTREVISION= 1 CATEGORIES= x11 MASTER_SITES= https://xkbcommon.org/download/ diff --git a/x11/libxklavier/Makefile b/x11/libxklavier/Makefile index f7cb45ae563..848977a828d 100644 --- a/x11/libxklavier/Makefile +++ b/x11/libxklavier/Makefile @@ -2,7 +2,7 @@ PORTNAME= libxklavier PORTVERSION= 5.3 -PORTREVISION= 1 +PORTREVISION= 2 PORTEPOCH= 1 CATEGORIES= x11 gnome MASTER_SITES= GNOME diff --git a/x11/lxpanel/Makefile b/x11/lxpanel/Makefile index 931a4b760ba..bf709b4d499 100644 --- a/x11/lxpanel/Makefile +++ b/x11/lxpanel/Makefile @@ -2,6 +2,7 @@ PORTNAME= lxpanel 
PORTVERSION= 0.9.3 +PORTREVISION= 1 CATEGORIES= x11 MASTER_SITES= SF/lxde/LXPanel%20%28desktop%20panel%29/LXPanel%20${PORTVERSION:R}.x/ diff --git a/x11/mate-applets/Makefile b/x11/mate-applets/Makefile index e68270761f6..097ad429ddf 100644 --- a/x11/mate-applets/Makefile +++ b/x11/mate-applets/Makefile @@ -1,5 +1,6 @@ PORTNAME= mate-applets PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= x11 mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/x11/mate-session-manager/Makefile b/x11/mate-session-manager/Makefile index 0468cd4b342..1503732360e 100644 --- a/x11/mate-session-manager/Makefile +++ b/x11/mate-session-manager/Makefile @@ -2,6 +2,7 @@ PORTNAME= mate-session-manager PORTVERSION= 1.26.0 +PORTREVISION= 1 CATEGORIES= x11 mate MASTER_SITES= MATE DIST_SUBDIR= mate diff --git a/x11/nwg-launchers/Makefile b/x11/nwg-launchers/Makefile index cfcb2a7d03f..ed6d48de2c7 100644 --- a/x11/nwg-launchers/Makefile +++ b/x11/nwg-launchers/Makefile @@ -1,6 +1,7 @@ PORTNAME= nwg-launchers DISTVERSIONPREFIX= v DISTVERSION= 0.6.3 +PORTREVISION= 1 CATEGORIES= x11 PATCH_SITES= https://github.com/${GH_ACCOUNT}/${GH_PROJECT}/commit/ diff --git a/x11/pipeglade/Makefile b/x11/pipeglade/Makefile index 493dfe6a2d1..a459efbbf20 100644 --- a/x11/pipeglade/Makefile +++ b/x11/pipeglade/Makefile @@ -2,6 +2,7 @@ PORTNAME= pipeglade PORTVERSION= 4.7.0 +PORTREVISION= 1 CATEGORIES= x11 devel MAINTAINER= trebbu@googlemail.com diff --git a/x11/plank/Makefile b/x11/plank/Makefile index 700914c4bb5..efd0e4d86f7 100644 --- a/x11/plank/Makefile +++ b/x11/plank/Makefile @@ -2,6 +2,7 @@ PORTNAME= plank PORTVERSION= 0.11.89 +PORTREVISION= 1 CATEGORIES= x11 MASTER_SITES= https://launchpad.net/${PORTNAME}/1.0/${PORTVERSION}/+download/ diff --git a/x11/polybar/Makefile b/x11/polybar/Makefile index b788800852a..5c256ce0cf0 100644 --- a/x11/polybar/Makefile +++ b/x11/polybar/Makefile @@ -1,6 +1,5 @@ PORTNAME= polybar -DISTVERSION= 3.5.7 -PORTREVISION= 1 +DISTVERSION= 3.6.1 CATEGORIES= x11 MASTER_SITES= 
https://github.com/polybar/polybar/releases/download/${DISTVERSION}/ @@ -14,6 +13,7 @@ BUILD_DEPENDS= xcb-proto>=1.9:x11/xcb-proto LIB_DEPENDS= libfontconfig.so:x11-fonts/fontconfig \ libfreetype.so:print/freetype2 \ libinotify.so:devel/libinotify \ + libuv.so:devel/libuv \ libxcb-ewmh.so:x11/xcb-util-wm \ libxcb-icccm.so:x11/xcb-util-wm \ libxcb-image.so:x11/xcb-util-image \ @@ -28,10 +28,10 @@ USE_GNOME= cairo USE_XORG= xcb # Needs libnl to work. Disabling it completely. -CMAKE_OFF= ENABLE_NETWORK +CMAKE_OFF= ENABLE_NETWORK BUILD_DOC -OPTIONS_DEFINE= ALSA CURSOR DEBUG GITHUB I3 IPC MPD PULSEAUDIO \ - TESTS XKEYBOARD +OPTIONS_DEFINE= ALSA CURSOR DEBUG GITHUB I3 IPC MPD \ + PULSEAUDIO TESTS XKEYBOARD OPTIONS_DEFAULT= CURSOR GITHUB I3 IPC MPD PULSEAUDIO XKEYBOARD OPTIONS_SUB= yes @@ -50,11 +50,11 @@ ALSA_LIB_DEPENDS= libasound.so:audio/alsa-lib CURSOR_CMAKE_BOOL= WITH_XCURSOR CURSOR_LIB_DEPENDS= libxcb-cursor.so:x11/xcb-util-cursor +DEBUG_CMAKE_BOOL= DEBUG_LOGGER + GITHUB_CMAKE_BOOL= ENABLE_CURL GITHUB_LIB_DEPENDS= libcurl.so:ftp/curl -DEBUG_CMAKE_BOOL= DEBUG_LOGGER - I3_BUILD_DEPENDS= ${LOCALBASE}/include/i3/ipc.h:x11-wm/i3 I3_CMAKE_BOOL= ENABLE_I3 I3_LIB_DEPENDS= libjsoncpp.so:devel/jsoncpp @@ -75,6 +75,18 @@ TESTS_CMAKE_BOOL= BUILD_TESTS XKEYBOARD_CMAKE_BOOL= ENABLE_XKEYBOARD XKEYBOARD_LIB_DEPENDS= libxcb-xkb.so:x11/libxcb +ETCFIX = include/utils/command.hpp \ + contrib/bash/polybar \ + contrib/zsh/_polybar \ + doc/man/polybar.1.rst \ + doc/man/polybar.5.rst \ + CHANGELOG.md + +post-patch: +.for f in ${ETCFIX} + @${REINPLACE_CMD} -e 's|/etc|${PREFIX}/etc|g' ${WRKSRC}/${f} +.endfor + do-test-TESTS-on: cd ${TEST_WRKSRC} && ${SETENV} BUILD_TESTS=ON \ ${SH} ${WRKSRC}/common/travis/tests.sh diff --git a/x11/polybar/distinfo b/x11/polybar/distinfo index c398c5e505e..a9bf7664683 100644 --- a/x11/polybar/distinfo +++ b/x11/polybar/distinfo @@ -1,3 +1,3 @@ -TIMESTAMP = 1632255455 -SHA256 (polybar-3.5.7.tar.gz) = 
73210e6d74217acb953b253990b4302343b7b6a7870fe1da9a1855daa44123db -SIZE (polybar-3.5.7.tar.gz) = 411318 +TIMESTAMP = 1646493488 +SHA256 (polybar-3.6.1.tar.gz) = a898ba8c847895be41efe5a31bb1b839da2c10b130b8411d5016982fbaadff7b +SIZE (polybar-3.6.1.tar.gz) = 437027 diff --git a/x11/polybar/files/patch-CMakeLists.txt b/x11/polybar/files/patch-CMakeLists.txt new file mode 100644 index 00000000000..00a661bf7aa --- /dev/null +++ b/x11/polybar/files/patch-CMakeLists.txt @@ -0,0 +1,11 @@ +--- CMakeLists.txt.orig 2022-03-27 18:35:56 UTC ++++ CMakeLists.txt +@@ -63,7 +63,7 @@ endif() + + if(BUILD_CONFIG) + install(FILES ${CMAKE_SOURCE_DIR}/doc/config.ini +- DESTINATION /etc/${PROJECT_NAME} ++ DESTINATION ${CMAKE_INSTALL_PREFIX}/etc/${PROJECT_NAME} + COMPONENT config) + endif() + diff --git a/x11/polybar/files/patch-src_utils_file.cpp b/x11/polybar/files/patch-src_utils_file.cpp new file mode 100644 index 00000000000..b2a5c05f39c --- /dev/null +++ b/x11/polybar/files/patch-src_utils_file.cpp @@ -0,0 +1,11 @@ +--- src/utils/file.cpp.orig 2022-03-27 18:43:31 UTC ++++ src/utils/file.cpp +@@ -322,7 +322,7 @@ namespace file_util { + possible_paths.push_back(xdg_config_dir + suffix + ".ini"); + } + +- possible_paths.push_back("/etc" + suffix + ".ini"); ++ possible_paths.push_back("${CMAKE_INSTALL_PREFIX}/etc" + suffix + ".ini"); + + for (const string& p : possible_paths) { + if (exists(p)) { diff --git a/x11/polybar/pkg-plist b/x11/polybar/pkg-plist index 1cd6ace0a6d..fae322946d7 100644 --- a/x11/polybar/pkg-plist +++ b/x11/polybar/pkg-plist @@ -1,5 +1,6 @@ bin/polybar -%%IPC%%bin/polybar-msg +bin/polybar-msg +%%ETCDIR%%/config.ini share/bash-completion/completions/polybar share/zsh/site-functions/_polybar share/zsh/site-functions/_polybar_msg diff --git a/x11/py-pyscreenshot/files/patch-setup.py b/x11/py-pyscreenshot/files/patch-setup.py new file mode 100644 index 00000000000..54ba48fed9f --- /dev/null +++ b/x11/py-pyscreenshot/files/patch-setup.py @@ -0,0 +1,12 @@ +--- 
setup.py.orig 2018-11-01 17:14:50 UTC ++++ setup.py +@@ -23,9 +23,6 @@ exec(open(os.path.join(NAME, 'about.py')).read()) + VERSION = __version__ + + extra = {} +-if sys.version_info >= (3,): +- extra['use_2to3'] = True +- extra['use_2to3_exclude_fixers'] = ['lib2to3.fixes.fix_import'] + + classifiers = [ + # Get more strings from diff --git a/x11/roxterm/Makefile b/x11/roxterm/Makefile index f98d2f36b91..d41d9a1b037 100644 --- a/x11/roxterm/Makefile +++ b/x11/roxterm/Makefile @@ -2,6 +2,7 @@ PORTNAME= roxterm PORTVERSION= 3.11.1 +PORTREVISION= 1 CATEGORIES= x11 MAINTAINER= nc@FreeBSD.org diff --git a/x11/simdock/Makefile b/x11/simdock/Makefile index 3b3da0f1c77..f0264642d71 100644 --- a/x11/simdock/Makefile +++ b/x11/simdock/Makefile @@ -2,7 +2,7 @@ PORTNAME= simdock PORTVERSION= 1.2 -PORTREVISION= 10 +PORTREVISION= 11 CATEGORIES= x11 MASTER_SITES= SF DISTNAME= ${PORTNAME}_${PORTVERSION} diff --git a/x11/swayr/Makefile b/x11/swayr/Makefile index d2898180b31..07ede31bd81 100644 --- a/x11/swayr/Makefile +++ b/x11/swayr/Makefile @@ -1,6 +1,6 @@ PORTNAME= swayr DISTVERSIONPREFIX= v -DISTVERSION= 0.15.0 +DISTVERSION= 0.16.0 CATEGORIES= x11 MASTER_SITES= https://git.sr.ht/~tsdh/${PORTNAME}/archive/${DISTVERSIONFULL}${EXTRACT_SUFX}?dummy=/ # XXX Teach USES=cargo to not override default DISTFILES @@ -24,7 +24,7 @@ CARGO_CRATES= aho-corasick-0.7.18 \ clap-3.1.6 \ clap_derive-3.1.4 \ directories-4.0.1 \ - dirs-sys-0.3.6 \ + dirs-sys-0.3.7 \ env_logger-0.9.0 \ getrandom-0.2.5 \ hashbrown-0.11.2 \ @@ -42,12 +42,12 @@ CARGO_CRATES= aho-corasick-0.7.18 \ proc-macro-error-1.0.4 \ proc-macro-error-attr-1.0.4 \ proc-macro2-1.0.36 \ - quote-1.0.15 \ + quote-1.0.16 \ rand-0.8.5 \ rand_chacha-0.3.1 \ rand_core-0.6.3 \ redox_syscall-0.2.11 \ - redox_users-0.4.0 \ + redox_users-0.4.2 \ regex-1.5.5 \ regex-syntax-0.6.25 \ rt-format-0.3.0 \ @@ -58,7 +58,7 @@ CARGO_CRATES= aho-corasick-0.7.18 \ strsim-0.10.0 \ swayipc-3.0.0 \ swayipc-types-1.0.1 \ - syn-1.0.88 \ + syn-1.0.89 \ 
termcolor-1.1.3 \ textwrap-0.15.0 \ thiserror-1.0.30 \ diff --git a/x11/swayr/distinfo b/x11/swayr/distinfo index 51b907b4974..cc499be4d18 100644 --- a/x11/swayr/distinfo +++ b/x11/swayr/distinfo @@ -1,6 +1,6 @@ -TIMESTAMP = 1647338635 -SHA256 (swayr-v0.15.0.tar.gz) = e2086db48ad5dc4966d6854e207209c27da48922712b9b10b66e2ccac8da7297 -SIZE (swayr-v0.15.0.tar.gz) = 935364 +TIMESTAMP = 1647615684 +SHA256 (swayr-v0.16.0.tar.gz) = e1d41462d4a2069b558cb6c54b7125d1475c473e0f64b5f31df32fe743670353 +SIZE (swayr-v0.16.0.tar.gz) = 935849 SHA256 (rust/crates/aho-corasick-0.7.18.crate) = 1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f SIZE (rust/crates/aho-corasick-0.7.18.crate) = 112923 SHA256 (rust/crates/atty-0.2.14.crate) = d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8 @@ -17,8 +17,8 @@ SHA256 (rust/crates/clap_derive-3.1.4.crate) = da95d038ede1a964ce99f49cbe27a7fb5 SIZE (rust/crates/clap_derive-3.1.4.crate) = 25358 SHA256 (rust/crates/directories-4.0.1.crate) = f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210 SIZE (rust/crates/directories-4.0.1.crate) = 16040 -SHA256 (rust/crates/dirs-sys-0.3.6.crate) = 03d86534ed367a67548dc68113a0f5db55432fdfbb6e6f9d77704397d95d5780 -SIZE (rust/crates/dirs-sys-0.3.6.crate) = 10626 +SHA256 (rust/crates/dirs-sys-0.3.7.crate) = 1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6 +SIZE (rust/crates/dirs-sys-0.3.7.crate) = 10597 SHA256 (rust/crates/env_logger-0.9.0.crate) = 0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3 SIZE (rust/crates/env_logger-0.9.0.crate) = 33573 SHA256 (rust/crates/getrandom-0.2.5.crate) = d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77 @@ -53,8 +53,8 @@ SHA256 (rust/crates/proc-macro-error-attr-1.0.4.crate) = a1be40180e52ecc98ad80b1 SIZE (rust/crates/proc-macro-error-attr-1.0.4.crate) = 7971 SHA256 (rust/crates/proc-macro2-1.0.36.crate) = c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029 SIZE 
(rust/crates/proc-macro2-1.0.36.crate) = 41411 -SHA256 (rust/crates/quote-1.0.15.crate) = 864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145 -SIZE (rust/crates/quote-1.0.15.crate) = 27281 +SHA256 (rust/crates/quote-1.0.16.crate) = b4af2ec4714533fcdf07e886f17025ace8b997b9ce51204ee69b6da831c3da57 +SIZE (rust/crates/quote-1.0.16.crate) = 27566 SHA256 (rust/crates/rand-0.8.5.crate) = 34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404 SIZE (rust/crates/rand-0.8.5.crate) = 87113 SHA256 (rust/crates/rand_chacha-0.3.1.crate) = e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88 @@ -63,8 +63,8 @@ SHA256 (rust/crates/rand_core-0.6.3.crate) = d34f1408f55294453790c48b2f1ebbb1c5b SIZE (rust/crates/rand_core-0.6.3.crate) = 21938 SHA256 (rust/crates/redox_syscall-0.2.11.crate) = 8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c SIZE (rust/crates/redox_syscall-0.2.11.crate) = 24032 -SHA256 (rust/crates/redox_users-0.4.0.crate) = 528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64 -SIZE (rust/crates/redox_users-0.4.0.crate) = 13023 +SHA256 (rust/crates/redox_users-0.4.2.crate) = 7776223e2696f1aa4c6b0170e83212f47296a00424305117d013dfe86fb0fe55 +SIZE (rust/crates/redox_users-0.4.2.crate) = 15302 SHA256 (rust/crates/regex-1.5.5.crate) = 1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286 SIZE (rust/crates/regex-1.5.5.crate) = 238119 SHA256 (rust/crates/regex-syntax-0.6.25.crate) = f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b @@ -85,8 +85,8 @@ SHA256 (rust/crates/swayipc-3.0.0.crate) = 40cc7e2bba9f31e7c46b119d9c542496806b9 SIZE (rust/crates/swayipc-3.0.0.crate) = 3681 SHA256 (rust/crates/swayipc-types-1.0.1.crate) = 620c3054335b817901d36f06fa5ef715f04d59d7b96f48ecc1a7bf408f194af7 SIZE (rust/crates/swayipc-types-1.0.1.crate) = 5624 -SHA256 (rust/crates/syn-1.0.88.crate) = ebd69e719f31e88618baa1eaa6ee2de5c9a1c004f1e9ecdb58e8352a13f20a01 -SIZE (rust/crates/syn-1.0.88.crate) = 236112 
+SHA256 (rust/crates/syn-1.0.89.crate) = ea297be220d52398dcc07ce15a209fce436d361735ac1db700cab3b6cdfb9f54 +SIZE (rust/crates/syn-1.0.89.crate) = 235966 SHA256 (rust/crates/termcolor-1.1.3.crate) = bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755 SIZE (rust/crates/termcolor-1.1.3.crate) = 17242 SHA256 (rust/crates/textwrap-0.15.0.crate) = b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb diff --git a/x11/swaysettings/Makefile b/x11/swaysettings/Makefile index f2a85921bff..92a726d0615 100644 --- a/x11/swaysettings/Makefile +++ b/x11/swaysettings/Makefile @@ -1,5 +1,6 @@ PORTNAME= swaysettings PORTVERSION= s20220227 +PORTREVISION= 1 CATEGORIES= x11 MAINTAINER= jbeich@FreeBSD.org diff --git a/x11/waybar/Makefile b/x11/waybar/Makefile index 71c2da5c9e9..a11ac650a84 100644 --- a/x11/waybar/Makefile +++ b/x11/waybar/Makefile @@ -1,5 +1,6 @@ PORTNAME= waybar DISTVERSION= 0.9.12 +PORTREVISION= 1 CATEGORIES= x11 PATCH_SITES= https://github.com/${GH_ACCOUNT}/${GH_PROJECT}/commit/ diff --git a/x11/wbar/Makefile b/x11/wbar/Makefile index 52ef0a93c63..3cbc00ebfb3 100644 --- a/x11/wbar/Makefile +++ b/x11/wbar/Makefile @@ -2,7 +2,7 @@ PORTNAME= wbar PORTVERSION= 2.3.4 -PORTREVISION= 3 +PORTREVISION= 4 CATEGORIES= x11 MASTER_SITES= https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/${PORTNAME}/ diff --git a/x11/wcm/Makefile b/x11/wcm/Makefile index 4cafdc06d83..90251e806dc 100644 --- a/x11/wcm/Makefile +++ b/x11/wcm/Makefile @@ -1,6 +1,7 @@ PORTNAME= wcm DISTVERSIONPREFIX= v DISTVERSION= 0.7.0 +PORTREVISION= 1 CATEGORIES= x11 MAINTAINER= jbeich@FreeBSD.org diff --git a/x11/wf-shell/Makefile b/x11/wf-shell/Makefile index 8cf1eb1d71d..a7442498a18 100644 --- a/x11/wf-shell/Makefile +++ b/x11/wf-shell/Makefile @@ -1,7 +1,7 @@ PORTNAME= wf-shell DISTVERSIONPREFIX= v DISTVERSION= 0.7.0 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11 MAINTAINER= jbeich@FreeBSD.org diff --git a/x11/wl-mirror/Makefile b/x11/wl-mirror/Makefile 
index 3626fad22a5..a169086945f 100644 --- a/x11/wl-mirror/Makefile +++ b/x11/wl-mirror/Makefile @@ -1,6 +1,6 @@ PORTNAME= wl-mirror DISTVERSIONPREFIX= v -DISTVERSION= 0.8.2 +DISTVERSION= 0.9.2 CATEGORIES= x11 MAINTAINER= jbeich@FreeBSD.org @@ -9,6 +9,8 @@ COMMENT= Simple Wayland output mirror client LICENSE= GPLv3 LICENSE_FILE= ${WRKSRC}/LICENSE +BROKEN_FreeBSD_12= ld: error: undefined symbol: memfd_create + BUILD_DEPENDS= wayland-protocols>0:graphics/wayland-protocols LIB_DEPENDS= libwayland-egl.so:graphics/wayland @@ -17,7 +19,7 @@ USE_GITHUB= yes USE_GITLAB= nodefault USE_GL= glesv2 egl GH_ACCOUNT= Ferdi265 -GL_TUPLE= https://gitlab.freedesktop.org:wlroots:wlr-protocols:d998ee6fc64ea7e066014023653d1271b7702c09:wlrproto/proto/wlr-protocols +GL_TUPLE= https://gitlab.freedesktop.org:wlroots:wlr-protocols:0c7437e2b600382f0be33949c4e244b2a3702fcb:wlrproto/proto/wlr-protocols PLIST_FILES= bin/${PORTNAME} post-patch: diff --git a/x11/wl-mirror/distinfo b/x11/wl-mirror/distinfo index efa8364e7ed..775371bcc3a 100644 --- a/x11/wl-mirror/distinfo +++ b/x11/wl-mirror/distinfo @@ -1,5 +1,5 @@ -TIMESTAMP = 1647015984 -SHA256 (Ferdi265-wl-mirror-v0.8.2_GH0.tar.gz) = 4dcd8e1e9c0c7a6c86ed6a5e2028b3956c1c6640bf79236af37407bb47e01010 -SIZE (Ferdi265-wl-mirror-v0.8.2_GH0.tar.gz) = 32696 -SHA256 (wlroots-wlr-protocols-d998ee6fc64ea7e066014023653d1271b7702c09_GL0.tar.gz) = b5bb4a054321beb900378aa4ceb4f8f68d840f2e8c70defcc9d6ba644ab18959 -SIZE (wlroots-wlr-protocols-d998ee6fc64ea7e066014023653d1271b7702c09_GL0.tar.gz) = 22764 +TIMESTAMP = 1648421245 +SHA256 (Ferdi265-wl-mirror-v0.9.2_GH0.tar.gz) = 18be6edd1ad292121ee9f07bf7eec650db76c7e1b1f39962506bef54ee5bd66f +SIZE (Ferdi265-wl-mirror-v0.9.2_GH0.tar.gz) = 37823 +SHA256 (wlroots-wlr-protocols-0c7437e2b600382f0be33949c4e244b2a3702fcb_GL0.tar.gz) = a0692e5454b8cdb3cb5a7c89393b870cd71289f749508ad31e572705c0645a07 +SIZE (wlroots-wlr-protocols-0c7437e2b600382f0be33949c4e244b2a3702fcb_GL0.tar.gz) = 22792 diff --git a/x11/workrave/Makefile 
b/x11/workrave/Makefile index ca367dedd36..3b7b6b99d00 100644 --- a/x11/workrave/Makefile +++ b/x11/workrave/Makefile @@ -2,6 +2,7 @@ PORTNAME= workrave PORTVERSION= 1.10.44 +PORTREVISION= 1 CATEGORIES= x11 MASTER_SITES= https://github.com/rcaelers/workrave/releases/download/v${PORTVERSION:S/./_/g}/ diff --git a/x11/xapp/Makefile b/x11/xapp/Makefile index 45efb60c1db..4f6658d5f17 100644 --- a/x11/xapp/Makefile +++ b/x11/xapp/Makefile @@ -2,6 +2,7 @@ PORTNAME= xapp PORTVERSION= 2.2.8 +PORTREVISION= 1 CATEGORIES= x11 gnome DIST_SUBDIR= gnome diff --git a/x11/xfce4-screenshooter-plugin/Makefile b/x11/xfce4-screenshooter-plugin/Makefile index 2062244c927..d28071b2b5c 100644 --- a/x11/xfce4-screenshooter-plugin/Makefile +++ b/x11/xfce4-screenshooter-plugin/Makefile @@ -2,6 +2,7 @@ PORTNAME= xfce4-screenshooter-plugin PORTVERSION= 1.9.10 +PORTREVISION= 1 CATEGORIES= x11 xfce MASTER_SITES= XFCE/apps DISTNAME= xfce4-screenshooter-${DISTVERSIONFULL} diff --git a/x11/xfce4-terminal/Makefile b/x11/xfce4-terminal/Makefile index 454cc5a0db6..106bd114ef1 100644 --- a/x11/xfce4-terminal/Makefile +++ b/x11/xfce4-terminal/Makefile @@ -2,7 +2,7 @@ PORTNAME= xfce4-terminal PORTVERSION= 0.8.10 -PORTREVISION= 2 +PORTREVISION= 3 CATEGORIES= x11 xfce MASTER_SITES= XFCE/apps DIST_SUBDIR= xfce4 diff --git a/x11/xkeyboard-config/Makefile b/x11/xkeyboard-config/Makefile index c7ede6bc2d0..3c8b2548b81 100644 --- a/x11/xkeyboard-config/Makefile +++ b/x11/xkeyboard-config/Makefile @@ -1,5 +1,6 @@ PORTNAME= xkeyboard-config DISTVERSION= 2.34 +PORTREVISION= 1 CATEGORIES= x11 MASTER_SITES= XORG/individual/data/${PORTNAME} diff --git a/x11/xscreensaver/Makefile b/x11/xscreensaver/Makefile index 9cd9cdd7afc..0acd9b9e13e 100644 --- a/x11/xscreensaver/Makefile +++ b/x11/xscreensaver/Makefile @@ -2,7 +2,7 @@ PORTNAME= xscreensaver PORTVERSION= 5.44 -PORTREVISION= 1 +PORTREVISION= 2 CATEGORIES= x11 MASTER_SITES= http://www.jwz.org/xscreensaver/ diff --git a/x11/xsnow/Makefile b/x11/xsnow/Makefile index 
ccfe5b826e7..b9bf22c1947 100644 --- a/x11/xsnow/Makefile +++ b/x11/xsnow/Makefile @@ -2,6 +2,7 @@ PORTNAME= xsnow PORTVERSION= 3.4.4 +PORTREVISION= 1 CATEGORIES= x11 MASTER_SITES= SF/${PORTNAME}/ diff --git a/x11/yelp/Makefile b/x11/yelp/Makefile index 0f6e13a4ff5..827e25865a9 100644 --- a/x11/yelp/Makefile +++ b/x11/yelp/Makefile @@ -2,6 +2,7 @@ PORTNAME= yelp DISTVERSION= 40.3 +PORTREVISION= 1 CATEGORIES= x11 gnome MASTER_SITES= GNOME/sources/${PORTNAME}/${PORTVERSION:C/^([0-9]+)\..*/\1/} DIST_SUBDIR= gnome3