First Commit
Some checks failed
/ build_macos (push) Has been cancelled
/ build_windows (push) Has been cancelled
/ build_ubuntu (push) Has been cancelled

This commit is contained in:
2025-11-19 16:23:45 +07:00
commit dbdc5bcc4a
1791 changed files with 489451 additions and 0 deletions

View File

@@ -0,0 +1,20 @@
# Extra files to ship in distribution tarballs: developer scripts and tool
# sources that no build rule would otherwise pick up.
EXTRA_DIST = \
	extract_version \
	lint \
	rmlo.cxx \
	splitconfig \
	template2mak.py \
	pqxxthreadsafety.cxx

# Look for headers in both the build tree (generated config headers in
# out-of-tree builds) and the source tree, plus the libpq headers.
AM_CPPFLAGS=-I$(top_builddir)/include -I$(top_srcdir)/include ${POSTGRES_INCLUDE}

# Override automatically generated list of default includes. It contains only
# unnecessary entries, and incorrectly mentions include/pqxx directly.
DEFAULT_INCLUDES=

# Developer utilities: built on demand, never installed.
noinst_PROGRAMS = rmlo pqxxthreadsafety

rmlo_SOURCES = rmlo.cxx
rmlo_LDADD = $(top_builddir)/src/libpqxx.la ${POSTGRES_LIB}

pqxxthreadsafety_SOURCES = pqxxthreadsafety.cxx
pqxxthreadsafety_LDADD = $(top_builddir)/src/libpqxx.la ${POSTGRES_LIB}

View File

@@ -0,0 +1,638 @@
# Makefile.in generated by automake 1.16.4 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2021 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
VPATH = @srcdir@
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
noinst_PROGRAMS = rmlo$(EXEEXT) pqxxthreadsafety$(EXEEXT)
subdir = tools
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/config/m4/libtool.m4 \
$(top_srcdir)/config/m4/ltoptions.m4 \
$(top_srcdir)/config/m4/ltsugar.m4 \
$(top_srcdir)/config/m4/ltversion.m4 \
$(top_srcdir)/config/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(SHELL) $(top_srcdir)/config/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/include/pqxx/config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
PROGRAMS = $(noinst_PROGRAMS)
am_pqxxthreadsafety_OBJECTS = pqxxthreadsafety.$(OBJEXT)
pqxxthreadsafety_OBJECTS = $(am_pqxxthreadsafety_OBJECTS)
pqxxthreadsafety_DEPENDENCIES = $(top_builddir)/src/libpqxx.la
AM_V_lt = $(am__v_lt_@AM_V@)
am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
am__v_lt_0 = --silent
am__v_lt_1 =
am_rmlo_OBJECTS = rmlo.$(OBJEXT)
rmlo_OBJECTS = $(am_rmlo_OBJECTS)
rmlo_DEPENDENCIES = $(top_builddir)/src/libpqxx.la
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
depcomp = $(SHELL) $(top_srcdir)/config/depcomp
am__maybe_remake_depfiles = depfiles
am__depfiles_remade = ./$(DEPDIR)/pqxxthreadsafety.Po \
./$(DEPDIR)/rmlo.Po
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CXXFLAGS) $(CXXFLAGS)
AM_V_CXX = $(am__v_CXX_@AM_V@)
am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@)
am__v_CXX_0 = @echo " CXX " $@;
am__v_CXX_1 =
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CXXLD = $(am__v_CXXLD_@AM_V@)
am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@)
am__v_CXXLD_0 = @echo " CXXLD " $@;
am__v_CXXLD_1 =
SOURCES = $(pqxxthreadsafety_SOURCES) $(rmlo_SOURCES)
DIST_SOURCES = $(pqxxthreadsafety_SOURCES) $(rmlo_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/config/depcomp \
$(top_srcdir)/config/mkinstalldirs
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
AR = @AR@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CSCOPE = @CSCOPE@
CTAGS = @CTAGS@
CXX = @CXX@
CXXCPP = @CXXCPP@
CXXDEPMODE = @CXXDEPMODE@
CXXFLAGS = @CXXFLAGS@
CYGPATH_W = @CYGPATH_W@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DLLTOOL = @DLLTOOL@
DOXYGEN = @DOXYGEN@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGREP = @EGREP@
ETAGS = @ETAGS@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
GREP = @GREP@
HAVE_DOT = @HAVE_DOT@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
LD = @LD@
LDFLAGS = @LDFLAGS@
LIBOBJS = @LIBOBJS@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
MAINT = @MAINT@
MAKEINFO = @MAKEINFO@
MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR = @MKDIR@
MKDIR_P = @MKDIR_P@
NM = @NM@
NMEDIT = @NMEDIT@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
PG_CONFIG = @PG_CONFIG@
PKG_CONFIG = @PKG_CONFIG@
POSTGRES_INCLUDE = @POSTGRES_INCLUDE@
PQXXVERSION = @PQXXVERSION@
PQXX_ABI = @PQXX_ABI@
RANLIB = @RANLIB@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
STRIP = @STRIP@
VERSION = @VERSION@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_CXX = @ac_ct_CXX@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
runstatedir = @runstatedir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
with_postgres_lib = @with_postgres_lib@
EXTRA_DIST = \
extract_version \
lint \
rmlo.cxx \
splitconfig \
template2mak.py \
pqxxthreadsafety.cxx
AM_CPPFLAGS = -I$(top_builddir)/include -I$(top_srcdir)/include ${POSTGRES_INCLUDE}
# Override automatically generated list of default includes. It contains only
# unnecessary entries, and incorrectly mentions include/pqxx directly.
DEFAULT_INCLUDES =
rmlo_SOURCES = rmlo.cxx
rmlo_LDADD = $(top_builddir)/src/libpqxx.la ${POSTGRES_LIB}
pqxxthreadsafety_SOURCES = pqxxthreadsafety.cxx
pqxxthreadsafety_LDADD = $(top_builddir)/src/libpqxx.la ${POSTGRES_LIB}
all: all-am
.SUFFIXES:
.SUFFIXES: .cxx .lo .o .obj
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu tools/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --gnu tools/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
clean-noinstPROGRAMS:
@list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
pqxxthreadsafety$(EXEEXT): $(pqxxthreadsafety_OBJECTS) $(pqxxthreadsafety_DEPENDENCIES) $(EXTRA_pqxxthreadsafety_DEPENDENCIES)
@rm -f pqxxthreadsafety$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(pqxxthreadsafety_OBJECTS) $(pqxxthreadsafety_LDADD) $(LIBS)
rmlo$(EXEEXT): $(rmlo_OBJECTS) $(rmlo_DEPENDENCIES) $(EXTRA_rmlo_DEPENDENCIES)
@rm -f rmlo$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(rmlo_OBJECTS) $(rmlo_LDADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pqxxthreadsafety.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/rmlo.Po@am__quote@ # am--include-marker
$(am__depfiles_remade):
@$(MKDIR_P) $(@D)
@echo '# dummy' >$@-t && $(am__mv) $@-t $@
am--depfiles: $(am__depfiles_remade)
.cxx.o:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
@am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $<
.cxx.obj:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\
@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\
@am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cxx.lo:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\
@am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
@am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Plo
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(BUILT_SOURCES)
$(MAKE) $(AM_MAKEFLAGS) distdir-am
distdir-am: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(PROGRAMS)
installdirs:
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \
mostlyclean-am
distclean: distclean-am
-rm -f ./$(DEPDIR)/pqxxthreadsafety.Po
-rm -f ./$(DEPDIR)/rmlo.Po
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f ./$(DEPDIR)/pqxxthreadsafety.Po
-rm -f ./$(DEPDIR)/rmlo.Po
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am:
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am am--depfiles check check-am clean \
clean-generic clean-libtool clean-noinstPROGRAMS cscopelist-am \
ctags ctags-am distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am install-info \
install-info-am install-man install-pdf install-pdf-am \
install-ps install-ps-am install-strip installcheck \
installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags tags-am uninstall uninstall-am
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:

View File

@@ -0,0 +1,6 @@
#! /bin/sh
# List source files that include the "ignore-deprecated-pre" header, sorted.
set -eu

# Regex matching an #include of the ignore-deprecated-pre header.
MARKER='include.*ignore-deprecated-pre'
# Search roots.  FILES stays unquoted below on purpose, so the shell expands
# the tools/*.cxx glob into individual arguments.
FILES="src include tools/*.cxx test config-tests"

# Quote "$MARKER" so the pattern cannot be glob-expanded or word-split by the
# shell before grep sees it.  (With set -e, no matches at all aborts the
# script via grep's exit status 1 -- unchanged from the original behaviour.)
grep -Ircl "$MARKER" $FILES | sort

View File

@@ -0,0 +1,73 @@
#! /bin/sh
# Print libpqxx version information based on the source tree's VERSION file.
set -eu

ARG="${1:-}"

# Source directory.  In out-of-tree builds, Automake sets this for us.
srcdir=${srcdir:-.}

# Print usage information.
usage() {
    cat <<EOF
Print libpqxx version information based on the source tree's VERSION file.
Usage: $0 [option]
Version strings look like: <major>.<minor>.<revision>.
Acceptable option values are:
-h, --help Print this message, and exit.
-a, --abi Show libpqxx ABI version; leave out revision number.
-f, --full Show full libpqxx version string (the default).
-M, --major Show major libpqxx version.
-m, --minor Show minor libpqxx version (between major and revision).
EOF
}

# Print "unknown argument" error.
unknown_arg() {
    cat <<EOF >&2
Unknown argument: $1.
Try
$0 --help
for usage information.
EOF
}

case "$ARG" in
''|-f|--full)
    # Default: Print full version.  Quote the path so a $srcdir containing
    # whitespace cannot break the command.
    cat "$srcdir/VERSION"
    ;;
-h|--help)
    # Print usage information, and exit.
    usage
    exit
    ;;
-a|--abi)
    # Print just the ABI version (major & minor).
    sed -e 's/^\([^.]*\.[^.]*\)\..*/\1/' "$srcdir/VERSION"
    ;;
-M|--major)
    # Print the major version number.
    sed -e 's/^\([^.]*\)\..*/\1/' "$srcdir/VERSION"
    ;;
-m|--minor)
    # Print the minor version number.
    sed -e 's/^[^.]*\.\([^.]*\)\..*/\1/' "$srcdir/VERSION"
    ;;
*)
    # Quote "$ARG" so an argument with spaces or glob characters reaches
    # unknown_arg as a single, literal word.
    unknown_arg "$ARG"
    exit 1
    ;;
esac

20
ext/libpqxx-7.7.3/tools/format Executable file
View File

@@ -0,0 +1,20 @@
#! /bin/bash
#
# Reformat source code using clang-format.
#
# This script is not portable: as of Ubuntu 21.04, virtualenv's "activate"
# seems to rely on a non-POSIX variable, $OSTYPE.
set -C -u -e

# Reformat C++ files.  Use NUL-delimited find/xargs so paths containing
# whitespace survive, and "-r" so clang-format is not invoked at all (it
# would block reading stdin) when nothing matches.
find . \( -name '*.cxx' -o -name '*.hxx' \) -print0 | xargs -0 -r clang-format -i

# Reformat CMake files inside a throwaway virtualenv.
WORKDIR=$(mktemp -d)
virtualenv -q --python="$(which python3)" "$WORKDIR/venv"
. "$WORKDIR/venv/bin/activate"
pip install -q six pyaml cmake-format
# cmake-format failures are non-fatal: formatting here is best-effort.
(find . -name CMakeLists.txt -print0 | xargs -0 -r cmake-format -i) || /bin/true
rm -rf "$WORKDIR"

197
ext/libpqxx-7.7.3/tools/lint Executable file
View File

@@ -0,0 +1,197 @@
#! /bin/bash
#
# Routine sanity checks for libpqxx source tree.
#
# Optionally, set environment variable "srcdir" to the source directory. It
# defaults to the parent directory of the one where this script is. This trick
# requires bash (or a close equivalent) as the shell.
set -eu -o pipefail

# Source tree root; ${BASH_SOURCE[0]} is this script's own path (a bashism).
SRCDIR="${srcdir:-$(dirname "${BASH_SOURCE[0]}")/..}"
# Full version string as reported by tools/extract_version (reads VERSION).
PQXXVERSION="$(cd "$SRCDIR" && "$SRCDIR/tools/extract_version")"
# First command-line argument only; parsed later by main().
ARGS="${1:-}"
# Check that all source code is ASCII.
#
# I'd love to have rich Unicode, but I can live without it. But we don't want
# any surprises in contributions.
check_ascii() {
    # Count bytes outside the tab..tilde (\011-\176) octal range; whatever
    # "tr -d" leaves behind is a non-ASCII or control byte.  (Newline \012
    # falls inside the deleted range, so line endings don't count.)
    local exotics=$(
        find -name \*.cxx -o -name \*.hxx |
        xargs cat |
        tr -d '\011-\176' |
        wc -c
    )
    # NOTE(review): $exotics is deliberately unquoted: BSD/macOS "wc -c" pads
    # its output with leading spaces, and word splitting strips them here.
    # Quoting it would make " 0" compare unequal to 0 and false-fail the check.
    if [ $exotics != 0 ]
    then
        echo >&2 "There's a non-ASCII character somewhere."
        exit 1
    fi
}
# This version must be at the top of the NEWS file.
check_news_version() {
    # Quote the NEWS path so a $SRCDIR containing whitespace can't split it.
    # $PQXXVERSION is interpolated into the grep pattern; it only ever holds
    # digits and dots, so the dots matching "any char" is harmless here.
    if ! head -n1 "$SRCDIR/NEWS" | grep -q "^$PQXXVERSION\$"
    then
        cat <<EOF >&2
Version $PQXXVERSION is not at the top of NEWS.
EOF
        exit 1
    fi
}
# Count number of times header $1 is included from each of given input files.
# Output is lines of <filename>:<count>, one line per file, sorted.
count_includes() {
    local HEADER_NAME WS PAT
    HEADER_NAME="$1"
    shift
    # Optional-whitespace character class for the grep pattern.
    WS="[[:space:]]*"
    # Matches an #include of $HEADER_NAME in either <...> or "..." form.
    PAT="^${WS}#${WS}include${WS}[<\"]$HEADER_NAME[>\"]"
    # It's OK for the grep to fail.
    # NOTE(review): $* is deliberately unquoted so each remaining argument
    # becomes its own file operand; with multiple files, "grep -c" prints one
    # <file>:<count> line per file.
    (grep -c "$PAT" $* || /bin/true) | sort
}
# Check that any includes of $1-pre.hxx are matched by $1-post.hxx ones.
match_pre_post_headers() {
    local NAME TEMPDIR PRE POST HEADERS DIFF
    NAME="$1"
    TEMPDIR="$(mktemp -d)"
    if test -z "$TEMPDIR"
    then
        echo >&2 "Could not create temporary directory."
        exit 1
    fi
    PRE="$TEMPDIR/pre"
    POST="$TEMPDIR/post"
    # All real headers; skip editor swap files.  Relies on being run from the
    # tree that contains include/pqxx (as the lint driver does).
    HEADERS=$(find include/pqxx/* -type f | grep -v '\.swp$')
    # BUG FIX: the original passed "$SRCDIR/NAME-pre.hxx", which never
    # expanded $NAME at all, so both counts were always empty and this check
    # could never fail.  The pattern must be the include path as written in
    # the #include directive, i.e. "$NAME-pre.hxx".
    count_includes "$NAME-pre.hxx" $HEADERS >"$PRE"
    count_includes "$NAME-post.hxx" $HEADERS >"$POST"
    # diff exits nonzero when the files differ; that's expected, not fatal.
    DIFF="$(diff "$PRE" "$POST")" || /bin/true
    rm -r -- "$TEMPDIR"
    if test -n "$DIFF"
    then
        cat <<EOF >&2
Mismatched pre/post header pairs:
$DIFF
EOF
        exit 1
    fi
}
# Any file that includes header-pre.hxx must also include header-post.hxx, and
# vice versa. Similar for ignore-deprecated-{pre|post}.hxx.
check_compiler_internal_headers() {
    # Each call compares include counts of the -pre.hxx and -post.hxx twins.
    match_pre_post_headers "pqxx/internal/header"
    match_pre_post_headers "pqxx/internal/ignore-deprecated"
}
# Run the available C++ static analysers (clang-tidy, Facebook infer) over
# src/ and tools/.  Both analysers are optional; each runs only if installed.
cpplint() {
    local cxxflags dialect includes
    if which clang-tidy >/dev/null
    then
        if [ -e compile_flags ]
        then
            # Pick out relevant flags, but leave out the rest.
            # If we're not compiling with clang, compile_flags may contain
            # options that clang-tidy doesn't recognise.
            dialect="$(grep -o -- '-std=[^[:space:]]*' compile_flags || true)"
            includes="$(
                grep -o -- '-I[[:space:]]*[^[:space:]]*' compile_flags ||
                true)"
        else
            dialect=""
            includes=""
        fi
        cxxflags="$dialect $includes"
        # TODO: Please, is there any way we can parallelise this?
        # TODO: I'd like cppcoreguidelines-*, but it's a tsunami of false positives.
        # TODO: Some useful checks in abseil-*, but it recommends "use our library."
        # TODO: Check test/, but tolerate some of the dubious stuff tests do.
        # NOTE(review): "--checks=boost-*," is unquoted; it only works because
        # no file in the cwd matches that glob (bash then passes it literally).
        clang-tidy \
            $(find $SRCDIR/src $SRCDIR/tools -name \*.cxx) \
            --checks=boost-*, \
            -- \
            -I$SRCDIR/include -Iinclude $cxxflags
    fi
    # Run Facebook's "infer" static analyser, if available.
    # Instructions here: https://fbinfer.com/docs/getting-started/
    if which infer >/dev/null
    then
        # This will work in an out-of-tree build, but either way it does
        # require a successful "configure", or a cmake with the "make"
        # generator.
        infer capture -- make -j$(nproc)
        infer run
    fi
}
# Static checks for the Python helper scripts in tools/.
pylint() {
    # NOTE(review): PYFILES is used unquoted below so the *.py glob expands
    # into individual file arguments.
    local PYFILES="$SRCDIR/tools/*.py $SRCDIR/tools/splitconfig"
    echo "Skipping pocketlint; it's not up to date with Python3."
    # if which pocketlint >/dev/null
    # then
    # pocketlint $PYFILES
    # fi
    # pyflakes3 is optional; silently skip if it isn't installed.
    if which pyflakes3 >/dev/null
    then
        pyflakes3 $PYFILES
    fi
}
# Parse command-line flags (from the global $ARGS) and run the checks.
# The slow clang-tidy/infer pass only runs with -f/--full.
main() {
    local full="no"
    # $ARGS is unquoted on purpose: when empty, the loop body never runs.
    for arg in $ARGS
    do
        case $arg in
        -h|--help)
            # Heredoc content and the EOF terminator must stay at column 0:
            # this is <<EOF, not <<-EOF, so leading whitespace is preserved.
            cat <<EOF
Perform static checks on libpqxx build tree.
Usage:
$0 -h|--help -- print this message and exit.
$0 -f|--full -- perform full check, including C++ analysis.
$0 -- perform default check.
EOF
            exit 0
            ;;
        -f|--full)
            full="yes"
            ;;
        *)
            echo >&2 "Unknown argument: '$arg'"
            exit 1
            ;;
        esac
    done
    check_ascii
    pylint
    check_news_version
    check_compiler_internal_headers
    if [ $full == "yes" ]
    then
        cpplint
    fi
}

# NOTE(review): called without "$@"; only $1 (captured in ARGS above) is seen.
main

View File

@@ -0,0 +1,70 @@
#! /usr/bin/env python3
"""M4-quote text, for use as a literal in configure.ac.
Produces M4 "code" which evaluates to the input text.
It's not easy to read plain text from an input file in M4, without having it
expanded as M4. Sometimes all we want is literal text!
"""
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
from argparse import ArgumentParser
from sys import (
stdin,
stdout,
)
def parse_args():
    """Parse command-line options for the M4-quoting tool."""
    parser = ArgumentParser(description=__doc__)
    # (long option, short option, default value, help text)
    option_specs = [
        ('--open', '-a', '[[', "Current open-quote symbol."),
        ('--close', '-b', ']]', "Current close-quote symbol."),
        ('--input', '-i', '-', "Input file, or '-' for stdin."),
        ('--output', '-o', '-', "Output file, or '-' for stdout."),
    ]
    for long_opt, short_opt, default, help_text in option_specs:
        parser.add_argument(
            long_opt, short_opt, default=default, help=help_text)
    return parser.parse_args()
def open_input(in_file):
    """Return a readable stream: stdin for '-', otherwise the opened file."""
    if in_file != '-':
        return open(in_file)
    return stdin
def open_output(out_file):
    """Return a writable stream: stdout for '-', otherwise the file opened for writing."""
    if out_file != '-':
        return open(out_file, 'w')
    return stdout
def escape(line):
    """Replace characters that are special in M4 with autoconf quadrigraphs.

    Equivalent to chained str.replace calls: none of the replacement
    quadrigraphs contain another special character, so a single per-character
    mapping produces the same result.
    """
    quadrigraphs = {
        '[': '@<:@',
        ']': '@:>@',
        '#': '@%:@',
        '$': '@S|@',
    }
    return ''.join(quadrigraphs.get(ch, ch) for ch in line)
def main(args):
    """Copy input to output as an M4 literal: open quote, escaped text, close quote."""
    # Nested "with" blocks mirror the original multi-item "with": if opening
    # the output fails, the already-opened input is still closed.
    with open_input(args.input) as istr:
        with open_output(args.output) as ostr:
            ostr.write(args.open)
            for text_line in istr:
                ostr.write(escape(text_line))
            ostr.write('\n')
            ostr.write(args.close)


if __name__ == '__main__':
    main(parse_args())

View File

@@ -0,0 +1,10 @@
// Print thread-safety information for present libpqxx build.
#include <iostream>
#include "pqxx/util"

// Writes the human-readable "description" field returned by
// pqxx::describe_thread_safety() to standard output.
int main()
{
  std::cout << pqxx::describe_thread_safety().description << std::endl;
}

View File

@@ -0,0 +1,39 @@
// Remove large objects given on the command line from the default database.
#include <iostream>
#include "pqxx/pqxx"

// Each command-line argument is the oid of a large object to remove.
// Connects with libpq defaults (PG* environment variables, etc.).
// Exit status: 0 = all removed, 1 = at least one removal failed,
// 2 = fatal error (e.g. connection failure or a bad oid argument).
int main(int, char *argv[])
{
  pqxx::connection conn;
  bool failures = false;
  try
  {
    // argv is null-terminated; start at 1 to skip the program name.
    for (int i{1}; argv[i]; ++i)
    {
      // Parse the argument as an oid.  NOTE(review): presumably throws on
      // malformed input, which lands in the outer handler (exit 2) -- verify
      // against pqxx::from_string.
      auto o{pqxx::from_string<pqxx::oid>(argv[i])};
      try
      {
        // pqxx::perform runs the callback as a transaction, with retries.
        pqxx::perform([o, &conn] {
          pqxx::work tx{conn};
          pqxx::blob::remove(tx, o);
          tx.commit();
        });
      }
      catch (std::exception const &e)
      {
        // One failed removal doesn't stop the rest; remember it for the
        // exit status.
        std::cerr << e.what() << std::endl;
        failures = true;
      }
    }
  }
  catch (std::exception const &e)
  {
    std::cerr << e.what() << std::endl;
    return 2;
  }
  return failures;  // false -> 0, true -> 1.
}

View File

@@ -0,0 +1,244 @@
#! /usr/bin/env python3
"""Extract configuration items into various configuration headers.
This uses the configitems file, a database consisting of text lines with the
following single-tab-separated fields:
- Name of the configuration item, e.g. PQXX_HAVE_PTRDIFF_T.
- Publication marker: public or internal.
- A single environmental factor determining the item, e.g. libpq or compiler.
"""
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
from argparse import ArgumentParser
import codecs
from errno import ENOENT
import os.path
from os import getcwd
import re
from sys import (
getdefaultencoding,
getfilesystemencoding,
stdout,
)
__metaclass__ = type
def guess_fs_encoding():
    """Try to establish the filesystem encoding.

    It's a sad thing: some guesswork is involved.  The encoding often seems to
    be conservatively, and incorrectly, set to ascii.
    """
    # Treat plain ASCII (under either of its common names) as "not really
    # configured" and keep looking; 'utf-8' at the end guarantees a match.
    rejected = ('ascii', 'ansi_x3.4-1968')
    for encoding in (getfilesystemencoding(), getdefaultencoding(), 'utf-8'):
        if encoding.lower() not in rejected:
            return encoding
    raise AssertionError("unreachable code reached.")
def guess_output_encoding():
    """Return the encoding of standard output."""
    # Builds in Docker containers may report no encoding at all.  Fall back
    # to ASCII then.  If this ever happens in a non-ASCII path, well, there
    # may be a more difficult decision to be made.  We'll burn that bridge
    # when we get to it, as they almost say.
    encoding = stdout.encoding
    if not encoding:
        encoding = 'ascii'
    return encoding
def decode_path(path):
    """Decode a path element from bytes to unicode string."""
    encoding = guess_fs_encoding()
    return path.decode(encoding)
def encode_path(path):
    """Encode a path element from unicode string to bytes."""
    # Nasty detail: the string may contain lone surrogates (e.g. paths read
    # from a badly-encoded filesystem).  Those break plain encoding, unless
    # you use the 'surrogateescape' error handler.
    encoding = guess_fs_encoding()
    return path.encode(encoding, 'surrogateescape')
def read_text_file(path, encoding='utf-8'):
    """Read text file, return as string, or `None` if file is not there.

    :param path: Path to the file, as a (unicode) string.
    :param encoding: Text encoding used to decode the file contents.
    :return: Full file contents, or None when the file does not exist.
    :raises IOError: On any I/O failure other than "file not found".
    """
    assert isinstance(path, type(''))
    try:
        with codecs.open(encode_path(path), encoding=encoding) as stream:
            return stream.read()
    except IOError as error:
        # A missing file is an expected condition; anything else propagates.
        if error.errno == ENOENT:
            return None
        else:
            raise
def read_lines(path, encoding='utf-8'):
    """Read text file, return as list of lines.

    :param path: Path to the file, as a (unicode) string.
    :param encoding: Text encoding used to decode the file contents.
    :return: List of lines, each keeping its trailing newline.
    """
    assert isinstance(path, type(''))
    with codecs.open(encode_path(path), encoding=encoding) as stream:
        return list(stream)
def read_configitems(filename):
    """Read the configuration-items database.

    :param filename: Path to the configitems file.
    :return: List of per-line field lists, e.g. [name, publication, factor].
    """
    # NOTE(review): line.split() splits on any whitespace run, not only the
    # single tabs the file format describes -- fine so long as no field ever
    # contains a space; confirm against the configitems file.
    return [line.split() for line in read_lines(filename)]
def map_configitems(items):
    """Map each config item to publication/factor.

    :param items: Sequence of config items: (name, publication, factor).
    :return: Dict mapping each item name to a tuple (publication, factor).
    """
    mapping = {}
    # Plain loop instead of a dict comprehension; on duplicate names the
    # last entry wins, exactly as with the comprehension.
    for name, publication, factor in items:
        mapping[name] = (publication, factor)
    return mapping
def read_header(source_tree, filename):
    """Read the original config.h generated by autoconf.

    :param source_tree: Path to libpqxx source tree.
    :param filename: Path to the config.h file, relative to `source_tree`.
    :return: Sequence of text lines from config.h.
    """
    assert isinstance(source_tree, type(''))
    assert isinstance(filename, type(''))
    return read_lines(os.path.join(source_tree, filename))
def extract_macro_name(config_line):
    """Extract a cpp macro name from a configuration line.

    :param config_line: Text line from config.h which may define a macro.
    :return: Name of macro defined in `config_line` if it is a `#define`
        statement, or None.
    """
    config_line = config_line.strip()
    # Bug fix: the pattern was a plain string literal, so "\s" was an
    # invalid escape sequence (a SyntaxWarning/DeprecationWarning on modern
    # Python).  Use a raw string.
    match = re.match(r'\s*#\s*define\s+(\S+)', config_line)
    if match is None:
        return None
    return match.group(1)
def extract_section(header_lines, items, publication, factor):
    """Extract config items for a given publication/factor section.

    :param header_lines: Sequence of header lines from config.h.
    :param items: Dict mapping macro names to (publication, factor).
    :param publication: Keep only macros with this publication tag.
    :param factor: Keep only macros for this environmental factor.
    :return: Sorted list of matching `#define` lines, stripped.
    """
    wanted = (publication, factor)
    section = [
        line.strip()
        for line in header_lines
        if items.get(extract_macro_name(line)) == wanted
    ]
    section.sort()
    return section
def compose_header(lines, publication, factor):
    """Compose header text containing the given `#define` lines."""
    intro = (
        "/* Automatically generated from config.h: %s/%s config. */"
        % (publication, factor)
    )
    # Intro line, blank separator, the defines, and a trailing newline.
    parts = [intro, ''] + list(lines) + ['']
    return '\n'.join(parts)
def generate_config(source_tree, header_lines, items, publication, factor):
    """Generate config file for a given section, if appropriate.

    Writes nothing if the configuration file ends up identical to one
    that's already there.

    :param source_tree: Location of the libpqxx source tree.
    :param header_lines: Sequence of header lines from config.h.
    :param items: Dict mapping macro names to (publication, factor).
    :param publication: Extract only macros for this publication tag.
    :param factor: Extract only macros for this environmental factor.
    """
    assert isinstance(source_tree, str)
    config_file = os.path.join(
        source_tree, 'include', 'pqxx',
        'config-%s-%s.h' % (publication, factor))
    # Bug fix: this used to print the *bytes* result of encode(), which
    # renders as b'...' under Python 3.  Round-trip through the output
    # encoding (with 'replace') so progress messages print a clean string
    # even when the path has characters the console can't show.
    out_encoding = guess_output_encoding()
    printable_path = config_file.encode(out_encoding, 'replace').decode(
        out_encoding, 'replace')
    section = extract_section(header_lines, items, publication, factor)
    contents = compose_header(section, publication, factor)
    if read_text_file(config_file) == contents:
        print("Generating %s: no changes--skipping." % printable_path)
        return
    print("Generating %s: %d item(s)." % (printable_path, len(section)))
    path = encode_path(config_file)
    # NOTE(review): encoding='ascii' will raise if a macro line contains
    # non-ASCII text -- presumably config.h never does; confirm.
    with codecs.open(path, 'wb', encoding='ascii') as header:
        header.write(contents)
def parse_args():
    """Parse command-line arguments.

    :return: Namespace with a `sourcetree` attribute.
    """
    default_source_tree = os.path.dirname(
        os.path.dirname(os.path.normpath(os.path.abspath(__file__))))
    parser = ArgumentParser(description=__doc__)
    # Bug fix: without nargs='?' argparse treats the positional as
    # required, so the documented default was never actually used.
    parser.add_argument(
        'sourcetree', metavar='PATH', nargs='?', default=default_source_tree,
        help="Location of libpqxx source tree. Defaults to '%(default)s'.")
    return parser.parse_args()
def check_args(args):
    """Validate command-line arguments; raise on problems."""
    source = args.sourcetree
    if not os.path.isdir(source):
        raise Exception("Not a directory: '%s'." % source)
def get_current_dir():
    """Return the current working directory as a unicode string."""
    cwd = getcwd()
    if not isinstance(cwd, bytes):
        return cwd
    # Older interpreters may hand us bytes; decode via the fs encoding.
    return decode_path(cwd)
def main():
    """Main program entry point."""
    args = parse_args()
    check_args(args)
    # The configitems file is under revision control, so it lives in the
    # source tree.
    items = read_configitems(os.path.join(args.sourcetree, 'configitems'))
    publications = sorted({item[1] for item in items})
    factors = sorted({item[2] for item in items})
    # The config.h header is generated, so it lives in the build tree --
    # which should be the current directory.
    build_tree = get_current_dir()
    original_header = read_header(
        build_tree, os.path.join('include', 'pqxx', 'config.h'))
    items_map = map_configitems(items)
    for publication in publications:
        for factor in factors:
            generate_config(
                build_tree, original_header, items_map, publication, factor)
# Standard script entry point: run only when executed, not when imported.
if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,194 @@
#! /usr/bin/env python3
"""Minimal macro processor. Used for generating VC++ makefiles.
The available template commands are:
Expand a template section for each file in a list of file patterns::
###MAKTEMPLATE:FOREACH my/path*/*.cxx,other*.cxx
...
###MAKTEMPLATE:ENDFOREACH
In the template section, you can use `###BASENAME###` to get the base name
of the file being processed (e.g. "base" for "../base.cxx"), and you can
use `###FILENAME###` to get the full filename.
Copyright (c) 2000-2022, Bart Samwel and Jeroen T. Vermeulen.
"""
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
from argparse import (
ArgumentError,
ArgumentParser,
RawDescriptionHelpFormatter,
)
from contextlib import contextmanager
from glob import glob
import os
from sys import (
argv,
stdin,
stderr,
stdout,
)
import sys
from textwrap import dedent
def expand_foreach_file(path, block, outfile):
    """Expand a "foreach" block for a single file path.

    Substitutes ###FILENAME### and ###BASENAME### in each line of `block`
    and writes the results to `outfile`.
    """
    stem, _ = os.path.splitext(os.path.basename(path))
    substitutions = [("###FILENAME###", path), ("###BASENAME###", stem)]
    for line in block:
        for marker, value in substitutions:
            line = line.replace(marker, value)
        outfile.write(line)
def match_globs(globs):
    """List all files matching any pattern in `globs`, without duplicates."""
    matches = set()
    for pattern in globs:
        matches.update(glob(pattern))
    return sorted(matches)
def expand_foreach(globs, block, outfile):
    """Expand a foreach block once per file matching any of `globs`.

    Writes the results to `outfile`.
    """
    # The block gets iterated once per matching file, so freeze the
    # (possibly one-shot) iterable into an immutable tuple first.
    frozen_block = tuple(block)
    for path in match_globs(globs):
        expand_foreach_file(path, frozen_block, outfile)
# Header to be prefixed to the generated file.
# ("{script}" is filled in via str.format with this script's name; see
# the header-writing code further down.)
OUTPUT_HEADER = dedent("""\
# AUTOMATICALLY GENERATED FILE -- DO NOT EDIT.
#
# This file is generated automatically by libpqxx's {script} script, and
# will be rewritten from time to time.
#
# If you modify this file, chances are your modifications will be lost.
#
# The {script} script should be available in the tools directory of the
# libpqxx source archive.
""")
# Template directives marking the start and end of a FOREACH section.
foreach_marker = r"###MAKTEMPLATE:FOREACH "
end_foreach_marker = r"###MAKTEMPLATE:ENDFOREACH"


def parse_foreach(line):
    """Parse a FOREACH directive, if `line` contains one.

    :param line: One line of template input.
    :return: List of FOREACH globs, or None if this is not a FOREACH line.
    """
    stripped = line.strip()
    if not stripped.startswith(foreach_marker):
        return None
    arguments = stripped[len(foreach_marker):]
    return arguments.split(',')
def read_foreach_block(infile):
    """Yield the lines of a FOREACH block, excluding its directives.

    Assumes the FOREACH directive was on the preceding line.  Consumes the
    ENDFOREACH line, but does not yield it.

    :return: Iterable of lines.
    """
    for line in infile:
        if line.strip().startswith(end_foreach_marker):
            break
        yield line
def expand_template(infile, outfile):
    """Expand the template read from `infile`; write results to `outfile`."""
    for line in infile:
        globs = parse_foreach(line)
        if globs is not None:
            section = read_foreach_block(infile)
            expand_foreach(globs, section, outfile)
        else:
            # Ordinary line: copy through unchanged.
            outfile.write(line)
@contextmanager
def open_stream(path=None, default=None, mode='r'):
    """Open the file at `path`, or yield `default`.  Close as appropriate.

    `default` should already be a stream, not a path; closing the context
    will not close it.  A stream opened from `path` is closed normally.
    """
    if path is not None:
        with open(path, mode) as stream:
            yield stream
    else:
        yield default
def parse_args():
    """Parse command-line arguments.

    :return: Tuple of: input path (or None for stdin), output path (or
        None for stdout).
    """
    parser = ArgumentParser(
        description=__doc__, formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument(
        'template', nargs='?',
        help="Input template. Defaults to standard input.")
    parser.add_argument(
        'output', nargs='?',
        help="Output file. Defaults to standard output.")
    options = parser.parse_args()
    return options.template, options.output
def write_header(stream, template_path=None):
    """Write the "generated file" header to `stream`.

    :param stream: Writable text stream to receive the header.
    :param template_path: Template file name to mention, if known.
    """
    hr = ('# ' + '#' * 78) + "\n"
    script = os.path.basename(argv[0])
    # Bug fix: this function used to write to the global `outstream`
    # (which only happens to exist when run as a script) instead of its
    # own `stream` parameter, making it unusable from other callers.
    stream.write(hr)
    stream.write(OUTPUT_HEADER.format(script=script))
    if template_path is not None:
        stream.write("#\n")
        stream.write("# Generated from template '%s'.\n" % template_path)
    stream.write(hr)
# Script entry point: read the template (file or stdin), write the header
# followed by the expanded output (file or stdout).
if __name__ == '__main__':
    try:
        template_path, output_path = parse_args()
    except ArgumentError as error:
        # Bad usage: report and exit with conventional status 2.
        stderr.write('%s\n' % error)
        sys.exit(2)
    input_stream = open_stream(template_path, stdin, 'r')
    output_stream = open_stream(output_path, stdout, 'w')
    with input_stream as instream, output_stream as outstream:
        write_header(outstream, template_path)
        expand_template(instream, outstream)

View File

@@ -0,0 +1,630 @@
#! /usr/bin/env python3
"""Brute-force test script: test libpqxx against many compilers etc.
This script makes no changes in the source tree; all builds happen in
temporary directories.
To make this possible, you may need to run "make distclean" in the
source tree. The configure script will refuse to configure otherwise.
"""
# Without this, pocketlint does not yet understand the print function.
from __future__ import print_function
from abc import (
ABCMeta,
abstractmethod,
)
from argparse import ArgumentParser
from contextlib import contextmanager
from datetime import datetime
from functools import partial
import json
from multiprocessing import (
JoinableQueue,
Process,
Queue,
)
from multiprocessing.pool import (
Pool,
)
from os import (
cpu_count,
getcwd,
)
import os.path
from queue import Empty
from shutil import rmtree
from subprocess import (
CalledProcessError,
check_call,
check_output,
DEVNULL,
)
from sys import (
stderr,
stdout,
)
from tempfile import mkdtemp
from textwrap import dedent
# Number of CPUs; used to size parallel make/ninja runs.
CPUS = cpu_count()

# Compiler commands to try: g++-8 through g++-13, plus clang++-6.0 and
# clang++-7 through clang++-14.
GCC_VERSIONS = list(range(8, 14))
GCC = ['g++-%d' % ver for ver in GCC_VERSIONS]
CLANG_VERSIONS = list(range(7, 15))
CLANG = ['clang++-6.0'] + ['clang++-%d' % ver for ver in CLANG_VERSIONS]
CXX = GCC + CLANG

# Standard-library choices to combine with each compiler ('' = default).
STDLIB = (
    '',
    '-stdlib=libc++',
)

# Optimisation levels to try.
OPT = ('-O0', '-O3')

# Linking variants: configure options for static vs. dynamic libraries.
LINK = {
    'static': ['--enable-static', '--disable-shared'],
    'dynamic': ['--disable-static', '--enable-shared'],
}

# Debug variants: configure options for audit/maintainer modes.
DEBUG = {
    'plain': [],
    'audit': ['--enable-audit'],
    'maintainer': ['--enable-maintainer-mode'],
    'full': ['--enable-audit', '--enable-maintainer-mode'],
}

# CMake "generators."  Maps a value for cmake's -G option to a command line to
# run.
#
# I prefer Ninja if available, because it's fast.  But hey, the default will
# work.
#
# Maps the name of the generator (as used with cmake's -G option) to the
# actual command line needed to do the build.
CMAKE_GENERATORS = {
    'Ninja': ['ninja'],
    'Unix Makefiles': ['make', '-j%d' % CPUS],
}
class Fail(Exception):
    """A known, well-handled exception.  Doesn't need a traceback."""


class Skip(Exception):
    """We're not doing this build.  It's not an error though."""
def run(cmd, output, cwd=None):
    """Run a command; log the command line and its output to `output`.

    Raises CalledProcessError if the command fails.
    """
    output.write("%s\n\n" % ' '.join(cmd))
    check_call(cmd, stdout=output, stderr=output, cwd=cwd)
def report(output, message):
    """Print `message` to standard output, and also log it to `output`."""
    print(message, flush=True)
    output.write('\n\n%s\n' % message)
def file_contains(path, text):
    """Does the file at `path` contain `text` on any line?"""
    with open(path) as stream:
        return any(text in line for line in stream)
@contextmanager
def tmp_dir():
    """Yield a fresh temporary directory; remove it again on exit."""
    scratch = mkdtemp()
    try:
        yield scratch
    finally:
        rmtree(scratch)
def write_check_code(work_dir):
    """Write a simple C++ program so we can test whether we can compile it.

    :return: The file's full path.
    """
    path = os.path.join(work_dir, "check.cxx")
    program = dedent("""\
        #include <iostream>
        int main()
        {
        std::cout << "Hello world." << std::endl;
        }
        """)
    with open(path, 'w') as source:
        source.write(program)
    return path
def _try_compile(command, cwd, err_output):
    """Run compile `command` in `cwd`; return whether it succeeded."""
    try:
        check_call(command, cwd=cwd, stderr=err_output)
    except (OSError, CalledProcessError):
        return False
    return True


def check_compiler(work_dir, cxx, stdlib, check, verbose=False):
    """Is the given compiler combo available?

    Tries to compile the `check` source file using compiler command `cxx`
    with standard-library option `stdlib` (may be the empty string).

    :return: True if the compile succeeded, False otherwise.
    """
    command = [cxx, check]
    if stdlib != '':
        command.append(stdlib)
    if verbose:
        err_file = os.path.join(work_dir, 'stderr.log')
        # Bug fix: this file handle used to be leaked (opened but never
        # closed).  Manage it with a context manager instead.
        with open(err_file, 'w') as err_output:
            ok = _try_compile(command, work_dir, err_output)
        if not ok:
            # Show the compiler's complaints.
            with open(err_file) as errors:
                stdout.write(errors.read())
    else:
        ok = _try_compile(command, work_dir, DEVNULL)
    if not ok:
        print("Can't build with '%s %s'. Skipping." % (cxx, stdlib))
    return ok
# TODO: Use Pool.
def check_compilers(compilers, stdlibs, verbose=False):
    """Check which compiler configurations are viable.

    :return: List of (compiler, stdlib) pairs that compiled successfully.
    """
    viable = []
    with tmp_dir() as work_dir:
        check = write_check_code(work_dir)
        for stdlib in stdlibs:
            for cxx in compilers:
                if check_compiler(
                        work_dir, cxx, stdlib, check=check, verbose=verbose):
                    viable.append((cxx, stdlib))
    return viable
def find_cmake_command():
    """Figure out a CMake generator we can use, or None."""
    try:
        caps = check_output(['cmake', '-E', 'capabilities'])
    except FileNotFoundError:
        # No cmake on this system.
        return None
    available = {
        generator['name'] for generator in json.loads(caps)['generators']}
    # Pick the first of our preferred generators that cmake supports.
    candidates = [gen for gen in CMAKE_GENERATORS.keys() if gen in available]
    if candidates == []:
        return None
    return candidates[0]
class Config(metaclass=ABCMeta):
    """Configuration for a build.

    These classes must be suitable for pickling, so we can send its
    objects to worker processes.
    """
    # Bug fix: the Python-2 style "__metaclass__ = ABCMeta" attribute has
    # no effect under Python 3, so @abstractmethod was not actually
    # enforced.  Declare the metaclass properly instead.

    @abstractmethod
    def name(self):
        """Return an identifier for this build configuration."""

    def make_log_name(self):
        """Compose log file name for this build."""
        return "build-%s.out" % self.name()
class Build:
    """A pending or ongoing build, in its own directory.

    Steps write their output to the build's log file; a failing step
    raises (see `logging`), it does not return a status.

    These classes must be suitable for pickling, so we can send its
    objects to worker processes.
    """
    def __init__(self, logs_dir, config=None):
        # The build's Config object; its name() determines the log name.
        self.config = config
        self.log = os.path.join(logs_dir, config.make_log_name())
        # Start a fresh log file.
        with open(self.log, 'w') as log:
            log.write("Starting %s.\n" % datetime.utcnow())
        # Scratch directory where the build happens; removed by clean_up().
        self.work_dir = mkdtemp()
    def clean_up(self):
        """Delete the build tree."""
        rmtree(self.work_dir)
    @abstractmethod
    def configure(self, log):
        """Prepare for a build."""
    @abstractmethod
    def build(self, log):
        """Build the code, including the tests. Don't run tests though."""
    def test(self, log):
        """Run tests."""
        run(
            [os.path.join(os.path.curdir, 'test', 'runner')], log,
            cwd=self.work_dir)
    def logging(self, function):
        """Call function, pass open write handle for `self.log`.

        Any exception is appended to the log file, then re-raised.
        """
        # TODO: Should probably be a decorator.
        with open(self.log, 'a') as log:
            try:
                function(log)
            except Exception as error:
                log.write("%s\n" % error)
                raise
    def do_configure(self):
        """Call `configure`, writing output to `self.log`."""
        self.logging(self.configure)
    def do_build(self):
        """Call `build`, writing output to `self.log`."""
        self.logging(self.build)
    def do_test(self):
        """Call `test`, writing output to `self.log`."""
        self.logging(self.test)
class AutotoolsConfig(Config):
    """A combination of build options for the "configure" script."""

    def __init__(self, cxx, opt, stdlib, link, link_opts, debug, debug_opts):
        # Store all options verbatim; they feed into the configure command
        # line and into this build's identifying name.
        self.cxx = cxx
        self.opt = opt
        self.stdlib = stdlib
        self.link = link
        self.link_opts = link_opts
        self.debug = debug
        self.debug_opts = debug_opts

    def name(self):
        parts = [self.cxx, self.opt, self.stdlib, self.link, self.debug]
        return '_'.join(parts)
class AutotoolsBuild(Build):
    """Build using the "configure" script."""
    __metaclass__ = ABCMeta

    def configure(self, log):
        cfg = self.config
        command = [
            os.path.join(getcwd(), "configure"),
            "CXX=%s" % cfg.cxx,
        ]
        if cfg.stdlib == '':
            command.append("CXXFLAGS=%s" % cfg.opt)
        else:
            # A nonstandard stdlib must go on both compile and link lines.
            command.append("CXXFLAGS=%s %s" % (cfg.opt, cfg.stdlib))
            command.append("LDFLAGS=%s" % cfg.stdlib)
        command.append("--disable-documentation")
        command += cfg.link_opts + cfg.debug_opts
        run(command, log, cwd=self.work_dir)

    def build(self, log):
        run(['make', '-j%d' % CPUS], log, cwd=self.work_dir)
        # Passing "TESTS=" like this will suppress the actual running of
        # the tests.  We run them in the "test" stage.
        run(['make', '-j%d' % CPUS, 'check', 'TESTS='], log, cwd=self.work_dir)
class CMakeConfig(Config):
    """Configuration for a CMake build."""

    def __init__(self, generator):
        # Generator name, as passed to cmake's -G option.
        self.generator = generator
        # Command line that performs the actual build for this generator.
        self.builder = CMAKE_GENERATORS[generator]

    def name(self):
        return "cmake"
class CMakeBuild(Build):
    """Build using CMake, with the configured generator."""
    __metaclass__ = ABCMeta

    def configure(self, log):
        source_dir = getcwd()
        run(
            ['cmake', '-G', self.config.generator, source_dir], output=log,
            cwd=self.work_dir)

    def build(self, log):
        run(self.config.builder, log, cwd=self.work_dir)
def parse_args():
    """Parse command-line arguments."""
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('--verbose', '-v', action='store_true')
    parser.add_argument(
        '--compilers', '-c', default=','.join(CXX),
        help="Compilers, separated by commas. Default is %(default)s.")
    parser.add_argument(
        '--optimize', '-O', default=','.join(OPT),
        help=(
            "Alternative optimisation options, separated by commas. "
            "Default is %(default)s."))
    parser.add_argument(
        '--stdlibs', '-L', default=','.join(STDLIB),
        help=(
            "Comma-separated options for choosing standard library. "
            "Defaults to %(default)s."))
    parser.add_argument(
        '--logs', '-l', default='.', metavar='DIRECTORY',
        help="Write build logs to DIRECTORY.")
    # NOTE(review): no type=int, so a command-line value arrives as a
    # string; also, main() in this script does not appear to consume
    # args.jobs -- confirm before relying on it.
    parser.add_argument(
        '--jobs', '-j', default=CPUS, metavar='CPUS',
        help=(
            "When running 'make', run up to CPUS concurrent processes. "
            "Defaults to %(default)s."))
    parser.add_argument(
        '--minimal', '-m', action='store_true',
        help="Make it as short a run as possible. For testing this script.")
    return parser.parse_args()
def soft_get(queue, block=True):
    """Get an item off `queue`, or `None` if the queue is empty."""
    try:
        item = queue.get(block)
    except Empty:
        return None
    return item


def read_queue(queue, block=True):
    """Yield entries off `queue`, terminating when it gets a `None`.

    Also terminates when the queue is empty.
    """
    while True:
        entry = soft_get(queue, block)
        if entry is None:
            return
        yield entry
def service_builds(in_queue, fail_queue, out_queue):
    """Worker process for the "build" stage: process one job at a time.

    Sends successful builds to `out_queue`, and failed builds (paired with
    their error text) to `fail_queue`.

    Terminates when it receives a `None`, at which point it sends a `None`
    into `out_queue` in turn.
    """
    for build in read_queue(in_queue):
        succeeded = True
        try:
            build.do_build()
        except Exception as error:
            succeeded = False
            fail_queue.put((build, str(error)))
        if succeeded:
            out_queue.put(build)
        in_queue.task_done()
    # Mark the end of the queue for the next stage.
    out_queue.put(None)
def service_tests(in_queue, fail_queue, out_queue):
    """Worker process for the "test" stage: test one build at a time.

    Sends successful builds to `out_queue`, and failed builds (paired with
    their error text) to `fail_queue`.

    Terminates when it receives a final `None`.  Does not send out a final
    `None` of its own.
    """
    for build in read_queue(in_queue):
        succeeded = True
        try:
            build.do_test()
        except Exception as error:
            succeeded = False
            fail_queue.put((build, str(error)))
        if succeeded:
            out_queue.put(build)
        in_queue.task_done()
def report_failures(queue, message):
    """Report failures from a failure queue.  Return the total number."""
    count = 0
    for build, error in read_queue(queue, block=False):
        print("%s: %s - %s" % (message, build.config.name(), error))
        count += 1
    return count
def count_entries(queue):
    """Drain `queue`, discarding entries; return how many there were."""
    return sum(1 for _ in read_queue(queue, block=False))
def gather_builds(args):
    """Produce the list of builds we want to perform."""
    if args.verbose:
        print("\nChecking available compilers.")
    candidates = args.compilers.split(',')
    compilers = check_compilers(
        candidates, args.stdlibs.split(','), verbose=args.verbose)
    if not list(compilers):
        raise Fail(
            "Did not find any viable compilers. Tried: %s."
            % ', '.join(candidates))
    opt_levels = args.optimize.split(',')
    link_types = list(LINK.items())
    debug_mixes = list(DEBUG.items())
    if args.minimal:
        # Cut every dimension down to a single option, for a quick run.
        compilers = compilers[:1]
        opt_levels = opt_levels[:1]
        link_types = link_types[:1]
        debug_mixes = debug_mixes[:1]
    builds = []
    for opt in sorted(opt_levels):
        for link, link_opts in sorted(link_types):
            for debug, debug_opts in sorted(debug_mixes):
                for cxx, stdlib in compilers:
                    builds.append(AutotoolsBuild(
                        args.logs,
                        AutotoolsConfig(
                            opt=opt, link=link, link_opts=link_opts,
                            debug=debug, debug_opts=debug_opts, cxx=cxx,
                            stdlib=stdlib)))
    cmake = find_cmake_command()
    if cmake is not None:
        builds.append(CMakeBuild(args.logs, CMakeConfig(cmake)))
    return builds
def enqueue(queue, build, *args):
    """Put `build` on `queue`, ignoring any additional arguments.

    The extra arguments get ignored so this can serve as a callback for
    `Pool`.

    We use this instead of a lambda in order to get the closure right: we
    want the build for the current iteration, not the last one executed
    before the lambda runs.
    """
    queue.put(build)
def enqueue_error(queue, build, error):
    """Put the `(build, error)` pair on `queue`."""
    queue.put((build, error))
def main(args):
    """Do it all: configure, build, and test every lined-up combination.

    :param args: Parsed command-line arguments.
    """
    if not os.path.isdir(args.logs):
        raise Fail("Logs location '%s' is not a directory." % args.logs)
    builds = gather_builds(args)
    if args.verbose:
        print("Lined up %d builds." % len(builds))
    # The "configure" step is single-threaded.  We can run many at the same
    # time, even when we're also running a "build" step at the same time.
    # This means we may run a lot more processes than we have CPUs, but
    # there's no law against that.  There's also I/O time to be covered.
    configure_pool = Pool()
    # Builds which have failed the "configure" stage, with their errors.
    # This queue must never stall, so that we can let results pile up here
    # while the work continues.
    configure_fails = Queue(len(builds))
    # Waiting list for the "build" stage.  It contains Build objects,
    # terminated by a final None to signify that there are no more builds
    # to be done.
    build_queue = JoinableQueue(10)
    # Builds that have failed the "build" stage.
    build_fails = Queue(len(builds))
    # Waiting list for the "test" stage.  It contains Build objects,
    # terminated by a final None.
    test_queue = JoinableQueue(10)
    # The "build" step tries to utilise all CPUs, and it may use a fair bit
    # of memory.  Run only one of these at a time, in a single worker
    # process.
    build_worker = Process(
        target=service_builds, args=(build_queue, build_fails, test_queue))
    build_worker.start()
    # Builds that have failed the "test" stage.
    test_fails = Queue(len(builds))
    # Completed builds.  This must never stall.
    done_queue = JoinableQueue(len(builds))
    # The "test" step can not run concurrently (yet).  So, run tests
    # serially in a single worker process.  It takes its jobs directly from
    # the "build" worker.
    test_worker = Process(
        target=service_tests, args=(test_queue, test_fails, done_queue))
    test_worker.start()
    # Feed all builds into the "configure" pool.  Each build which passes
    # this stage goes into the "build" queue.
    for build in builds:
        configure_pool.apply_async(
            build.do_configure, callback=partial(enqueue, build_queue, build),
            error_callback=partial(enqueue_error, configure_fails, build))
    if args.verbose:
        print("All jobs are underway.")
    configure_pool.close()
    configure_pool.join()
    # TODO: Async reporting for faster feedback.
    configure_fail_count = report_failures(configure_fails, "CONFIGURE FAIL")
    if args.verbose:
        print("Configure stage done.")
    # Mark the end of the build queue for the build worker.
    build_queue.put(None)
    build_worker.join()
    # TODO: Async reporting for faster feedback.
    build_fail_count = report_failures(build_fails, "BUILD FAIL")
    if args.verbose:
        print("Build step done.")
    # Mark the end of the test queue for the test worker.
    test_queue.put(None)
    test_worker.join()
    # TODO: Async reporting for faster feedback.
    # TODO: Collate failures into meaningful output, e.g. "shared library fails."
    test_fail_count = report_failures(test_fails, "TEST FAIL")
    if args.verbose:
        print("Test step done.")
    # All done.  Clean up.
    for build in builds:
        build.clean_up()
    ok_count = count_entries(done_queue)
    if ok_count == len(builds):
        print("All tests OK.")
    else:
        print(
            "Failures during configure: %d - build: %d - test: %d. OK: %d."
            % (
                configure_fail_count,
                build_fail_count,
                test_fail_count,
                ok_count,
            ))
# Script entry point.  A Fail (a known, well-handled error) prints to
# stderr and exits with status 2; main() returns None on completion, so
# exit() reports success.
if __name__ == '__main__':
    try:
        exit(main(parse_args()))
    except Fail as failure:
        stderr.write("%s\n" % failure)
        exit(2)

31
ext/libpqxx-7.7.3/tools/todo Executable file
View File

@@ -0,0 +1,31 @@
#! /bin/bash
#
# List "TODO" and "XXX" items in the given files, or throughout the source
# code.
set -e -u -o pipefail

# TODO: Make location-independent?

# Emit the default list of files to search: configure.ac, every .cxx/.hxx
# file under the current directory, and everything in tools/.
find_source() {
    echo configure.ac
    find . -name \*.cxx -o -name \*.hxx | sed -e 's|^\./||' | sort
    for f in $(ls tools)
    do
        echo tools/$f
    done
}

# Files to search: command-line arguments if given, otherwise the default
# list from find_source.
FILES=${*:-$(find_source)}

# Search for "$1:" in files $2.
# (This function adds the colon. That way, the search statement itself won't
# show up in the search.)
# Note: $2 is deliberately left unquoted so the whitespace-separated file
# list splits back into individual arguments for grep.
search_for() {
    grep $1: $2
}

# "|| true" keeps "set -e" from aborting when grep finds no matches.
search_for XXX "$FILES" || true
search_for TODO "$FILES" || true

View File

@@ -0,0 +1,29 @@
#! /bin/bash
#
# Update the libpqxx copyright strings in the current directory.
#
# Usage: update-copyright [year]
#
# Where "year" is the new copyright year. Defaults to the current year.
#
# Assumes GNU grep and GNU sed.
set -eu -o pipefail

# The regexes are a bit awkward because they must work in both grep and sed.
#
# F'rinstance, PREFIX can't include the dash because our replacement string in
# sed would have a backreference (e.g. "\3") immediately followed by a year
# (e.g. 2022), and there's no clear boundary between the backreference number
# and the year: "\32022".

# Common start of a copyright line: "Copyright (c)" (optional comma) and
# the first year of the range.
PREFIX='Copyright (c),* 2000'
# Any year in the 2000s.
YEAR='20[0-9][0-9]'
# Target year: first command-line argument, or the current year.
NEW_YEAR="${1:-$(date '+%Y')}"
SUFFIX=',* \(.* and \)*Jeroen T\. Vermeulen'

# Find every file containing the copyright line, and rewrite its end year
# in place.
grep -rIl "$PREFIX-$YEAR$SUFFIX" |
xargs -r sed -i -e "s/\\($PREFIX\\)-$YEAR\\($SUFFIX\\)/\\1-$NEW_YEAR\\2/"

# This one is so different that I'd rather keep it a special case.
sed \
    -i \
    -e "s/\\(2000\\)-$YEAR\\(,* Jeroen T\\. Vermeulen\\)/\1-$NEW_YEAR\\2/" \
    doc/conf.py