Synchronet now requires the libarchive development package (e.g. libarchive-dev on Debian-based Linux distributions; see libarchive.org for more information) to build successfully.

@@ -138,3 +138,9 @@ ifdef CRYPTLIBDIR
CRYPT_LDFLAGS += -L$(CRYPTLIBDIR)
endif
####################
# libarchive stuff #
####################
ifeq ($(os),win32)
LDFLAGS += -L$(3RDP_ROOT)/win32.release/libarchive/bin
endif
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ImportGroup Label="PropertySheets" />
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<_PropertySheetDisplayName>Archive Library</_PropertySheetDisplayName>
</PropertyGroup>
<ItemDefinitionGroup>
<ClCompile>
<AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)/include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ClCompile>
<Link>
<AdditionalDependencies>$(MSBuildThisFileDirectory)/bin/archive.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<ItemGroup />
</Project>
\ No newline at end of file
/* zconf.h -- configuration of the zlib compression library
* Copyright (C) 1995-2005 Jean-loup Gailly.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id$ */
#ifndef ZCONF_H
#define ZCONF_H
/*
* If you *really* need a unique prefix for all types and library functions,
* compile with -DZ_PREFIX. The "standard" zlib should be compiled without it.
*/
#ifdef Z_PREFIX
# define deflateInit_ z_deflateInit_
# define deflate z_deflate
# define deflateEnd z_deflateEnd
# define inflateInit_ z_inflateInit_
# define inflate z_inflate
# define inflateEnd z_inflateEnd
# define deflateInit2_ z_deflateInit2_
# define deflateSetDictionary z_deflateSetDictionary
# define deflateCopy z_deflateCopy
# define deflateReset z_deflateReset
# define deflateParams z_deflateParams
# define deflateBound z_deflateBound
# define deflatePrime z_deflatePrime
# define inflateInit2_ z_inflateInit2_
# define inflateSetDictionary z_inflateSetDictionary
# define inflateSync z_inflateSync
# define inflateSyncPoint z_inflateSyncPoint
# define inflateCopy z_inflateCopy
# define inflateReset z_inflateReset
# define inflateBack z_inflateBack
# define inflateBackEnd z_inflateBackEnd
# define compress z_compress
# define compress2 z_compress2
# define compressBound z_compressBound
# define uncompress z_uncompress
# define adler32 z_adler32
# define crc32 z_crc32
# define get_crc_table z_get_crc_table
# define zError z_zError
# define alloc_func z_alloc_func
# define free_func z_free_func
# define in_func z_in_func
# define out_func z_out_func
# define Byte z_Byte
# define uInt z_uInt
# define uLong z_uLong
# define Bytef z_Bytef
# define charf z_charf
# define intf z_intf
# define uIntf z_uIntf
# define uLongf z_uLongf
# define voidpf z_voidpf
# define voidp z_voidp
#endif
#if defined(__MSDOS__) && !defined(MSDOS)
# define MSDOS
#endif
#if (defined(OS_2) || defined(__OS2__)) && !defined(OS2)
# define OS2
#endif
#if defined(_WINDOWS) && !defined(WINDOWS)
# define WINDOWS
#endif
#if defined(_WIN32) || defined(_WIN32_WCE) || defined(__WIN32__)
# ifndef WIN32
# define WIN32
# endif
#endif
#if (defined(MSDOS) || defined(OS2) || defined(WINDOWS)) && !defined(WIN32)
# if !defined(__GNUC__) && !defined(__FLAT__) && !defined(__386__)
# ifndef SYS16BIT
# define SYS16BIT
# endif
# endif
#endif
/*
* Compile with -DMAXSEG_64K if the alloc function cannot allocate more
* than 64k bytes at a time (needed on systems with 16-bit int).
*/
#ifdef SYS16BIT
# define MAXSEG_64K
#endif
#ifdef MSDOS
# define UNALIGNED_OK
#endif
#ifdef __STDC_VERSION__
# ifndef STDC
# define STDC
# endif
# if __STDC_VERSION__ >= 199901L
# ifndef STDC99
# define STDC99
# endif
# endif
#endif
#if !defined(STDC) && (defined(__STDC__) || defined(__cplusplus))
# define STDC
#endif
#if !defined(STDC) && (defined(__GNUC__) || defined(__BORLANDC__))
# define STDC
#endif
#if !defined(STDC) && (defined(MSDOS) || defined(WINDOWS) || defined(WIN32))
# define STDC
#endif
#if !defined(STDC) && (defined(OS2) || defined(__HOS_AIX__))
# define STDC
#endif
#if defined(__OS400__) && !defined(STDC) /* iSeries (formerly AS/400). */
# define STDC
#endif
#ifndef STDC
# ifndef const /* cannot use !defined(STDC) && !defined(const) on Mac */
# define const /* note: need a more gentle solution here */
# endif
#endif
/* Some Mac compilers merge all .h files incorrectly: */
#if defined(__MWERKS__)||defined(applec)||defined(THINK_C)||defined(__SC__)
# define NO_DUMMY_DECL
#endif
/* Maximum value for memLevel in deflateInit2 */
#ifndef MAX_MEM_LEVEL
# ifdef MAXSEG_64K
# define MAX_MEM_LEVEL 8
# else
# define MAX_MEM_LEVEL 9
# endif
#endif
/* Maximum value for windowBits in deflateInit2 and inflateInit2.
* WARNING: reducing MAX_WBITS makes minigzip unable to extract .gz files
* created by gzip. (Files created by minigzip can still be extracted by
* gzip.)
*/
#ifndef MAX_WBITS
# define MAX_WBITS 15 /* 32K LZ77 window */
#endif
/* The memory requirements for deflate are (in bytes):
(1 << (windowBits+2)) + (1 << (memLevel+9))
that is: 128K for windowBits=15 + 128K for memLevel = 8 (default values)
plus a few kilobytes for small objects. For example, if you want to reduce
the default memory requirements from 256K to 128K, compile with
make CFLAGS="-O -DMAX_WBITS=14 -DMAX_MEM_LEVEL=7"
Of course this will generally degrade compression (there's no free lunch).
The memory requirements for inflate are (in bytes) 1 << windowBits
that is, 32K for windowBits=15 (default value) plus a few kilobytes
for small objects.
*/
/* Type declarations */
#ifndef OF /* function prototypes */
# ifdef STDC
# define OF(args) args
# else
# define OF(args) ()
# endif
#endif
/* The following definitions for FAR are needed only for MSDOS mixed
* model programming (small or medium model with some far allocations).
* This was tested only with MSC; for other MSDOS compilers you may have
* to define NO_MEMCPY in zutil.h. If you don't need the mixed model,
* just define FAR to be empty.
*/
#ifdef SYS16BIT
# if defined(M_I86SM) || defined(M_I86MM)
/* MSC small or medium model */
# define SMALL_MEDIUM
# ifdef _MSC_VER
# define FAR _far
# else
# define FAR far
# endif
# endif
# if (defined(__SMALL__) || defined(__MEDIUM__))
/* Turbo C small or medium model */
# define SMALL_MEDIUM
# ifdef __BORLANDC__
# define FAR _far
# else
# define FAR far
# endif
# endif
#endif
#if defined(WINDOWS) || defined(WIN32)
/* If building or using zlib as a DLL, define ZLIB_DLL.
* This is not mandatory, but it offers a little performance increase.
*/
# ifdef ZLIB_DLL
# if defined(WIN32) && (!defined(__BORLANDC__) || (__BORLANDC__ >= 0x500))
# ifdef ZLIB_INTERNAL
# define ZEXTERN extern __declspec(dllexport)
# else
# define ZEXTERN extern __declspec(dllimport)
# endif
# endif
# endif /* ZLIB_DLL */
/* If building or using zlib with the WINAPI/WINAPIV calling convention,
* define ZLIB_WINAPI.
* Caution: the standard ZLIB1.DLL is NOT compiled using ZLIB_WINAPI.
*/
# ifdef ZLIB_WINAPI
# ifdef FAR
# undef FAR
# endif
# include <windows.h>
/* No need for _export, use ZLIB.DEF instead. */
/* For complete Windows compatibility, use WINAPI, not __stdcall. */
# define ZEXPORT WINAPI
# ifdef WIN32
# define ZEXPORTVA WINAPIV
# else
# define ZEXPORTVA FAR CDECL
# endif
# endif
#endif
#if defined (__BEOS__)
# ifdef ZLIB_DLL
# ifdef ZLIB_INTERNAL
# define ZEXPORT __declspec(dllexport)
# define ZEXPORTVA __declspec(dllexport)
# else
# define ZEXPORT __declspec(dllimport)
# define ZEXPORTVA __declspec(dllimport)
# endif
# endif
#endif
#ifndef ZEXTERN
# define ZEXTERN extern
#endif
#ifndef ZEXPORT
# define ZEXPORT
#endif
#ifndef ZEXPORTVA
# define ZEXPORTVA
#endif
#ifndef FAR
# define FAR
#endif
#if !defined(__MACTYPES__)
typedef unsigned char Byte; /* 8 bits */
#endif
typedef unsigned int uInt; /* 16 bits or more */
typedef unsigned long uLong; /* 32 bits or more */
#ifdef SMALL_MEDIUM
/* Borland C/C++ and some old MSC versions ignore FAR inside typedef */
# define Bytef Byte FAR
#else
typedef Byte FAR Bytef;
#endif
typedef char FAR charf;
typedef int FAR intf;
typedef uInt FAR uIntf;
typedef uLong FAR uLongf;
#ifdef STDC
typedef void const *voidpc;
typedef void FAR *voidpf;
typedef void *voidp;
#else
typedef Byte const *voidpc;
typedef Byte FAR *voidpf;
typedef Byte *voidp;
#endif
#if 0 /* HAVE_UNISTD_H -- this line is updated by ./configure */
# include <sys/types.h> /* for off_t */
# include <unistd.h> /* for SEEK_* and off_t */
# ifdef VMS
# include <unixio.h> /* for off_t */
# endif
# define z_off_t off_t
#endif
#ifndef SEEK_SET
# define SEEK_SET 0 /* Seek from beginning of file. */
# define SEEK_CUR 1 /* Seek from current position. */
# define SEEK_END 2 /* Set file pointer to EOF plus "offset" */
#endif
#ifndef z_off_t
# define z_off_t long
#endif
#if defined(__OS400__)
# define NO_vsnprintf
#endif
#if defined(__MVS__)
# define NO_vsnprintf
# ifdef FAR
# undef FAR
# endif
#endif
/* MVS linker does not support external names larger than 8 bytes */
#if defined(__MVS__)
# pragma map(deflateInit_,"DEIN")
# pragma map(deflateInit2_,"DEIN2")
# pragma map(deflateEnd,"DEEND")
# pragma map(deflateBound,"DEBND")
# pragma map(inflateInit_,"ININ")
# pragma map(inflateInit2_,"ININ2")
# pragma map(inflateEnd,"INEND")
# pragma map(inflateSync,"INSY")
# pragma map(inflateSetDictionary,"INSEDI")
# pragma map(compressBound,"CMBND")
# pragma map(inflate_table,"INTABL")
# pragma map(inflate_fast,"INFA")
# pragma map(inflate_copyright,"INCOPY")
#endif
#endif /* ZCONF_H */
@@ -96,7 +96,7 @@
" %4u\b\b\b\b\1h%s \1n\1c(\1h\1`?\1n\1c=Menu) (\1h%u\1n\1c of \1h%u\1n\1c): \1n\1~"
"\r\nYou didn't post message #%d\r\n" 072 YouDidntPostMsgN
"\1?Delete message #%u '%s'" 073 DeletePostQ
"\1n\1b[\1h\1wI\1n\1b] \1hAutoLogon via IP address "\ 074 UserDefaultsAutoLogon
"\1n\1b[\1h\1wI\1n\1b] \1hAutoLogon via IP address "\ 074 UserDefaultsAutoLogon
"\1n\1b: \1c%s\r\n"
"\1n\r\n\1m%s sent to \1h%s #%u\r\n" 075 MsgSentToUser
"\1_\r\n\1y\1hText to search for: " 076 SearchStringPrompt
@@ -201,8 +201,8 @@
"Delete Guru file" 163 DeleteGuruLogQ
"\1n\1g\7Telegram from \1n\1h%s\1n\1g on %s:\r\n\1h" 164 TelegramFmt
"\r\n\r\nYou can't download.\r\n" 165 R_Download
"\r\n\1w\1hSearching all directories @ELLIPSIS@\r\n" 166 SearchingAllDirs
"\1w\1hSearching all libraries @ELLIPSIS@\r\n" 167 SearchingAllLibs
"\r\n\1w\1hSearching current library @ELLIPSIS@\r\n\1q" 166 SearchingAllDirs
"\1w\1hSearching all libraries @ELLIPSIS@\r\n\1q" 167 SearchingAllLibs
"\r\n\1w\1h%u Files Listed.\r\n" 168 NFilesListed
"\r\n\1w\1hEmpty directory.\r\n" 169 EmptyDir
"\r\n\1n\1cSearching for files "\ 170 NScanHdr
@@ -278,7 +278,7 @@
"\r\n\7\1r\1h\1iBatch upload queue is full.\1n\r\n" 229 BatchUlQueueIsFull
"\r\n\1n\1m\1h%s \1n\1madded to batch upload queue"\ 230 FileAddedToUlQueue
"\1c - Files: \1h%u \1n\1c(\1h%u\1n\1c Max)\r\n"
"\7\1_\1w\1hNode %2d: \1g%s\1n\1g sent you a file.\r\n" 231 UserToUserXferNodeMsg
"Unused 231" 231 UserToUserXferNodeMsg
"\1n\1?\1g\1h%s\1y: \1w~B\1yatch download, "\ 232 FileInfoPrompt
"\1w~E\1yxtended info, "\
"\1w~V\1yiew file, "\
@@ -312,14 +312,14 @@
"\1_\1y\1hCredit value : \1n" 257 EditCreditValue
"\1_\1y\1hTimes downloaded : \1n" 258 EditTimesDownloaded
"\1_\1y\1hOpen count : \1n" 259 EditOpenCount
"\1_\1y\1hAlternate Path : \1n" 260 EditAltPath
"UNUSED260" 260 Unused260
"\r\n\1w\1hYou only have %s credits.\r\n" 261 YouOnlyHaveNCredits
"\r\nYou don't have enough credits.\r\n" 262 NotEnoughCredits
"\r\n\1w\1hNot enough time left to transfer.\r\n" 263 NotEnoughTimeToDl
"\r\nProtocol, ~Batch, ~Quit, or [~Next]: " 264 ProtocolBatchQuitOrNext
"\r\nBulk Upload %s %s Directory\r\n"\ 265 BulkUpload
"(Enter '-' for description to skip file):\r\n"
"\1_\1y\1h%s\1w%7uk\1b:" 266 BulkUploadDescPrompt
"\1_\1y\1h%-12s\1w%7uk\1b:" 266 BulkUploadDescPrompt
"\r\n\1r\1h\1iNo files in batch queue.\1n"\ 267 NoFilesInBatchQueue
"\r\n\r\n\1mUse \1hD\1n\1m or \1hU\1n\1m to add files to the queue.\r\n"
"\1_\r\n\1y\1hBatch: \1n" 268 BatchMenuPrompt
@@ -356,16 +356,14 @@
"\r\nUploader: %s\r\nFilename: %s\r\n" 294 TempFileInfo
"\r\n%s bytes in %u files\r\n" 295 TempDirTotal
"\r\n%u files removed.\r\n" 296 NFilesRemoved
"\1r\1h\1iAll other nodes should NOT be in use "\ 297 ResortWarning
"during resort/compression.\1n\r\n"
"\1-\1c%-15.15s \1y\1h%-25.25s " 298 ResortLineFmt
"\1bEmpty\1n\r\n" 299 ResortEmptyDir
"\1wSorting @ELLIPSIS@" 300 Sorting
"\b\b\b\b\b\b\b\b\b\b\1bSorted \1n\r\n" 301 Sorted
"\b\b\b\b\b\b\b\b\b\b\1bCompressed %u slots "\ 302 Compressed
"(%s bytes)\1n\r\n"
"\1w\1h\r\n%s is already in the queue.\r\n" 303 FileAlreadyInQueue
"\1w\1h\1/File is not online.\r\n" 304 FileIsNotOnline
"Tag this file" 297 TagFileQ
"\1h\1yEnter (space-separated) Tags: " 298 TagFilePrompt
"UNUSED299" 299 Unused299
"UNUSED300" 300 Unused300
"UNUSED301" 301 Unused301
"UNUSED302" 302 Unused302
"\1w\1h\r\n%s is already in the queue.\r\n" 303 FileAlreadyInQueue
"\1w\1h\1/File is not online.\r\n" 304 FileIsNotOnline
"\1n\r\n\1m\1h%s \1n\1madded to batch download queue -\r\n"\ 305 FileAddedToBatDlQueue
"\1cFiles: \1h%u\1n\1c (\1h%u\1n\1c Max) Credits: \1h%s\1n\1c"\
" Bytes: \1h%s\1n\1c Time: \1h%s\r\n"
@@ -379,7 +377,7 @@
"\1_\1h\1w%s was %sdownloaded by %s\r\n"\ 312 DownloadUserMsg
"\1n\1gYou were awarded %s credits.\r\n"
"partially " 313 Partially
"\r\n\1n\1gLibrary :\1h (%u) %s" 314 FiLib
"\1l\1n\1gLibrary :\1h (%u) %s" 314 FiLib
"\r\n\1n\1gDirectory :\1h (%u) %s" 315 FiDir
"\r\n\1n\1gFilename :\1h %s" 316 FiFilename
"\r\n\1n\1gFile size :\1h %s (%s) bytes" 317 FiFileSize
@@ -391,9 +389,9 @@
"\r\n\1n\1gLast downloaded :\1h %s" 323 FiDateDled
"\r\n\1n\1gTimes downloaded :\1h %u" 324 FiTimesDled
"\r\n\1n\1gTime to download :\1h %s" 325 FiTransferTime
"\r\n\1n\1gAlternate Path :\1h %s" 326 FiAlternatePath
"\r\n\1r\1h\1iInvalid Alternate Path Number: %u\1n" 327 InvalidAlternatePathN
"\1_\1/\1w\1hFile is currently open by %d user%s.\r\n" 328 FileIsOpen
"\r\n\1n\1gTags :\1h %s" 326 FiTags
"UNUSED327" 327 Unused327
"\r\n\1n\1gFile %-6.6s :\1h %s" 328 FiChecksum
"\7\7\r\n\1h\1rH\1ba\1gp\1yp\1cy \1mB\1wi\1rr\1gt\1bh\1cd\1ma\1yy "\ 329 HappyBirthday
"\1wt\1ro \1gy\1bo\1cu\r\n\7\7\1mH\1ya\1wp\1rp\1gy "\
"\1bB\1ci\1mr\1yt\1wh\1rd\1ga\1by \1ct\1mo \1yy\1wo\1ru\1g.\1b.\1c.\r\n\r\n"
@@ -430,10 +428,8 @@
"same time.\1n\r\n"
"\7\1r\1h\1i%d critical errors have occurred. "\ 359 CriticalErrors
"Type ;ERR at main menu.\1n\r\n"
"\1_\1w\1hYou have %d User to User Transfer%s "\ 360 UserXferForYou
"waiting for you\r\n"
"\1_\1w\1hYou have sent %d unreceived User to "\ 361 UnreceivedUserXfer
"User Transfer%s\r\n"
"Unused360" 360 UserXferForYou
"Unused361" 361 UnreceivedUserXfer
"Read your mail now" 362 ReadYourMailNowQ
"Sorry, the system is closed to new users.\r\n" 363 NoNewUsers
"New User Password: " 364 NewUserPasswordPrompt
@@ -737,7 +733,7 @@
"\r\n%u credits have been added to your account.\r\n" 592 CreditedAccount
"\r\nANSI Capture is now %s\r\n" 593 ANSICaptureIsNow
"\1n\1m\r\nRetrieving \1h%s\1n\1m..." 594 RetrievingFile
"\1n\r\nAlternate upload path now: %s\r\n" 595 AltULPathIsNow
"UNUSED595" 595 Unused595
"\r\nPrivate" 596 PrivatePostQ
"\r\n\1_\1y\1hPost to: " 597 PostTo
"\r\nPrivate posts require a destination user "\ 598 NoToUser
......
_New FileBases_
The new FileBase files are stored in the same database location as before
(e.g. data/dirs/), but the file extensions are different:
^ Purpose ^ Old ^ New ^
| Index (e.g. filenames) | code.ixb | code.sid |
| Data (e.g. descriptions) | code.dat | code.shd |
| Extended Descriptions | code.exb | code.sdt |
| Metadata | code.dab | code.ini |
| Allocation Tables | N/A | code.sda and code.sha |
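For example, a quick sanity check of which of these files exist for a given
directory code might look like the following hedged sketch: it assumes the
FileBase .file property referenced by the scripts later in this changeset
holds the base path without an extension, and "freeware" is a hypothetical
directory code:
var fb = new FileBase("freeware");                // hypothetical directory code
var exts = [".sid", ".shd", ".sdt", ".ini", ".sda", ".sha"];
for (var i = 0; i < exts.length; i++) {
    var path = fb.file + exts[i];                 // .file assumed to be the base path, sans extension
    print(path + (file_exists(path) ? "  [present]" : "  [missing]"));
}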
If these new filebase file extensions look familiar to you, that's because
we're using the Synchronet Message Base format/library (v3.0 now) for the
underlying database. This means that the SMB tools you may be familiar with
(e.g. CHKSMB, FIXSMB, SMBUTIL) also work on the new filebases.
The conversion of the filebases to the new format should occur automatically
when you run 'jsexec update', which in turn will execute the program
'upgrade_to_v319' when appropriate (just one time). Once converted, you can
delete the old files or leave them in place in case you need to revert to
Synchronet v3.18 for any reason. The old filebase files won't harm anything if
left in place.
The creation of each new filebase will automatically calculate and store
hashes of the contents of the actual files available for download. These
hashes are useful for duplicate file detection and data integrity assurance.
If you wish to opt out of file hashing (which consumes the majority of the
time during the upgrade process), you can turn off file hashing in the
per-directory Toggle Options in SCFG->File Areas. You would have to perform
this opt-out for the directories of your choice *before* you run
'jsexec update' / 'upgrade_to_v319'. You should not normally need to run
'upgrade_to_v319' directly.
_Long Filenames_
While filenames stored in the filebases used to be limited to MS-DOS
compatible 8.3 formatted names, longer filenames are now supported on all
platforms. Additionally, some previously invalid filename characters (e.g.
spaces) are now allowed, and files without extensions (i.e. no '.' in their
filename) are now supported.
Although Synchronet for Windows previously used Win32 API functions for short
<-> long filename conversions, resulting in the unfortunate Micros~1 shortened
filenames being stored and sometimes seen, that is no longer the case. Except
for the %~ command-line specifier, those short/long filename conversion
functions are no longer in use anywhere within Synchronet for Windows - it's
native filenames throughout. The filebase conversion process (upgrade_to_v319
/ 'jsexec update') on Windows will attempt to automatically resolve the
native/long filenames and store those names, and only those names, in the new
filebases.
Note: abbreviated versions of long filenames are displayed in some situations
to accommodate the limited width of a traditional BBS user terminal. However,
an effort is made to always display the full file extension/suffix
(e.g. "longfilename.jpeg" may be *displayed* as "longfil.jpeg").
Note: only 64 characters of each filename (always including any extension) are
indexed for searches and duplicate checking, but the entire filename, up to
64K characters in length, is stored intact in the filebase.
Filenames with /extensions/ longer than 3 characters, e.g. ".jpeg", ".tar.gz",
can be added to the filebases, but the configurable compressible, extractable,
and viewable file types/extensions remain limited to 3 characters in SCFG.
Similarly, archive "types" of at most 3 characters are stored per BBS user
record (for each user's QWK packet format and temp archive preference).
_Large Files_
Files greater in size than 2GB or 4GB (depending) were previously a problem.
Though there are still some 32-bit file length limitations (e.g. only files
smaller than 4GB in size will be hashed), there is better and increasing
support for larger files in general.
Note: the ZMODEM transfer protocol as designed by Chuck Forsberg only supports
files up to 4GB in size and in many cases, files greater than or equal to 2GB
in size will prove difficult or impossible to transfer between some ZMODEM
implementations. In general, it is recommended to use an alternate transfer
protocol (e.g. YMODEM[-G], FTP, HTTP) for files >= 2GB in length.
_Large Directories_
Due to the new filebase design, directories with more than 10,000 files are
now supported (though not encouraged).
_Descriptions_
The file "summary" or single-line "short description" remains limited to 58
characters in length for practical purposes. Though longer file summaries
(up to 64KB) can be stored in the filebase, they are not recommended.
Extended (multi-line) descriptions may now span more than the previous limit
of 10 lines or 512 total characters. There is no technical limit to the length
of extended file descriptions, though a limit of 1024 characters is imposed on
descriptions imported from description files embedded in archives (e.g.
FILE_ID.DIZ). If you have a need for longer (than 1024 character) extended
descriptions imported from embedded description files, please provide me with
details.
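As a rough illustration (not the definitive API), adding a file with both a
short summary and an extended description might look like the sketch below.
The field names are simply those used by the bundled addfiles.js (name, from,
desc, extdesc); the directory code and filename are hypothetical, and the
default open() mode is assumed to permit adding:
var fb = new FileBase("freeware");                // hypothetical directory code
if (fb.open()) {
    var file = {
        name: "example.zip",                      // hypothetical file already in the directory's path
        from: "Sysop",
        desc: "Short one-line summary (58 chars or fewer)",
        extdesc: "A longer multi-line description\r\n"
               + "no longer limited to 10 lines\r\n"
               + "or 512 total characters.\r\n"
    };
    if (!fb.add(file))                            // add() as used by addfiles.js
        alert("Error " + fb.last_error + " adding " + file.name);
    fb.close();
}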
_Batches_
File upload and download batch queues used to be maintained in memory (though
they were written to disk files to be retained between user logons); they are
now entirely maintained in disk files (data/user/*.upload and *.dnload in .ini
file format). This means that custom batch management can now be performed
easily by modules or non-Terminal Server scripts.
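A minimal sketch of what such a script could look like, assuming a
data/user/####.dnload naming convention and the stock File .ini methods
(iniGetSections/iniGetObject); the actual section/key layout of these queue
files is not documented here, so treat it as hypothetical:
var usernum = 1;                                  // hypothetical user number
var path = format("%suser/%04u.dnload", system.data_dir, usernum);
var f = new File(path);
if (f.open("r")) {
    var sections = f.iniGetSections();            // assumed: one section per queued file
    for (var i = 0; i < sections.length; i++)
        print(sections[i] + ": " + JSON.stringify(f.iniGetObject(sections[i])));
    f.close();
}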
_Hashes_
Files are now hashed, by default, using multiple hashing algorithms (CRC16,
CRC32, MD5, and SHA1) for duplicate file detection and for reporting to users
(e.g. to ensure data integrity). For a file to be considered a duplicate
(i.e. rejected for upload), it must have the same size and hash values as
another file already in a filebase. Each directory is configurable as to
whether or not to hash its files and whether to use them for duplicate file
detection (by name or hash).
_Sorting_
While the old filebases were kept sorted (by filename), the new filebases are
indexed in the order in which the files are imported into the database.
Sorting of the files for display purposes in the terminal
and FTP servers is optional and configured by the sysop:
Name Ascending (case-insensitive)
Name Descending (case-insensitive)
Name Ascending (case-sensitive)
Name Descending (case-sensitive)
Date Ascending
Date Descending
As a result, the "RESORT" file transfer operator command has been removed.
_Tags_
Individual files can now be tagged for easy searching/grouping. This feature
will be utilized/enhanced more in the future.
_JavaScript_
The new "FileBase" class is used (similar to the existing MsgBase class) to
open and access filebases from JavaScript modules. Using this class, the
defaults methods of listing and transferring files can be replaced with
custom modules.
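A minimal listing sketch, using only the calls the bundled filelist.js relies
on (open, get_list, close); the directory code and numeric detail level are
illustrative:
var fb = new FileBase("freeware");                // hypothetical directory code
if (fb.open()) {
    var detail = 1;                               // illustrative detail level (see filelist.js)
    var list = fb.get_list("*", detail);          // optional 3rd argument enables sorting (per filelist.js)
    for (var i = 0; i < list.length; i++)
        print(format("%-25s %10u  %s", list[i].name, list[i].size, list[i].desc || ""));
    fb.close();
}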
_TickIT_
The new FileBase JS class is now used to import files directly from
FidoNet-style .TIC files (via tickit.js), so no dependency on or invocation of
any external utilities (e.g. addfiles) is required.
_Utilities_
The native utilities ADDFILES, FILELIST, DELFILES, and DUPEFIND have been
replaced with similarly named and purposed JavaScript utility scripts to be
invoked with JSexec:
- addfiles.js for importing lists of files into filebases
- postfile.js for importing a single file into a filebase
- filelist.js for generating file listings from filebases
- delfiles.js for removing files from filebases
- dupefind.js for discovering and reporting duplicate files in the filebases
_Performance_
Due to the nature and use of the new filebase API, file listings (e.g. large
file listings from the Synchronet FTP server) are much faster, as are
filename/pattern, description-text, and duplicate-file searches.
_FTP Server_
Because modern web browsers have removed support for rendering FTP-downloaded
content (e.g. HTML files), the FTP Server no longer supports dynamic HTML
index file generation (e.g. 00index.html). Instead, we will focus on better
support for filebase browsing and file transfers via HTTP and HTTPS in
addition to the traditional FTP and FTPS uses. The dynamic generation of ASCII
file listings via FTP (e.g. 00index) is still supported by the FTP server,
though it is now much faster than before.
_libarchive_
The libarchive library (http://libarchive.org/) has been integrated into
Synchronet (and exposed via the new "Archive" JavaScript class) and into
SBBSecho, so that the creation, listing/viewing, and extraction of archived
files can now be performed "in-process", without the invocation of or
dependency on external programs (e.g. Info-ZIP unzip or PKUNZIP).
Formats fully supported:
- zip
- 7zip
- gzipped-tar
- bzipped-tar
Formats supported for viewing and extraction only:
- rar
- lha/lzh
- iso
- xar
- cab
This means that for most BBSes, no "Compressible" or "Extractable" file types
need to be configured in SCFG->File Options. Additionally, by setting
"Archive Format" to "ZIP" for SCFG->Networks->QWK->Hubs, no "pack" or "unpack"
command-line need be configured.
For listing the contents of archives, the new archive.js utility script may be
installed as a "Viewable File Type" handler for the commonly supported file
extensions by running 'jsexec archive.js install'.
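For illustration, a minimal sketch of the Archive class, limited to the calls
used by archive.js below (list() and extract()); the paths are hypothetical
and extract() is assumed to take an output directory as its first argument,
as the archive.js command-line usage suggests:
var path = "/sbbs/temp/example.zip";              // hypothetical archive path
var list = Archive(path).list();                  // throws on unsupported formats (see archive.js)
for (var i = 0; i < list.length; i++)
    print(list[i].name + "  " + list[i].size);
var outdir = "/sbbs/temp/out/";                   // hypothetical output directory
print(Archive(path).extract(outdir) + " files extracted");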
_DIZ_
Description files embedded in archives (e.g. FILE_ID.DIZ) are now supported
more uniformly and seamlessly.
_File Echoes_
Each file transfer directory configured in SCFG->File Areas may now have an
"Area Tag" explicitly set for FidoNet-style file distribution networks. If
an Area Tag is not explicitly set, then the directory's short name is used
(with spaces replaced with underscores) automatically. tickit.js now uses
this new "area_tag" file_area.dir[] JS property for its "AutoAreas" feature.
_User to User Files_
The user to user file transfer feature has been removed. Send file attachments
with email/netmail if you want to send files to users.
_Opened Files_
The "open" (reference) counter for files is now gone. If you want to remove
a file from the filebase while a user has it in their batch download queue or
is actively downloading it, nothing is preventing you from doing so.
As a result, the "CLOSE" file transfer operator command has been removed.
_Alternate File Paths_
The support for "Alternate File Paths" has been removed. There are better
modern operating/file system solutions to the original problem solved with
this feature.
As a result, the "ALTUL" file transfer operator command has been removed.
_Bi-directional File Transfers_
The protocol drivers that supported bi-directional file transfers (Bi-Modem,
HS/Link) are long-unsupported DOS/OS2 programs with no equivalent in the
modern world. Bye bye Bi-modem. :-(
\ No newline at end of file
// Add files to a file base/area directory for SBBS v3.19+
// Replaces functionality of the old ADDFILES program written in C
require("sbbsdefs.js", 'LEN_FDESC');
"use strict";
const default_excludes = [
"FILES.BBS",
"FILE_ID.DIZ",
"DESCRIPT.ION",
"SFFILES.BBS"
];
if(argv.indexOf("-help") >= 0 || argv.indexOf("-?") >= 0) {
print("usage: [-options] [dir-code] [listfile]");
print("options:");
print("-all add files in all libraries/directories (implies -auto)");
print("-lib=<name> add files in all directories of specified library (implies -auto)");
print("-from=<name> specify uploader's user name (may require quotes)");
print("-ex=<filename> add to excluded filename list");
print(" (default: " + default_excludes.join(',') + ")");
print("-diz always extract/use description in archive");
print("-update update existing file entries (default is to skip them)");
print("-date[=fmt] include today's date in description");
print("-fdate[=fmt] include file's date in description");
print("-adate[=fmt] include newest archived file date in description");
print(" (fmt = optional strftime date/time format string)");
print("-v increase verbosity of output");
print("-debug enable debug output");
exit(0);
}
function datestr(t)
{
if(date_fmt)
return strftime(date_fmt, t);
return system.datestr(t);
}
function archive_date(file)
{
try {
var list = Archive(file).list();
} catch(e) {
return file_date(file);
}
var t = 0;
for(var i = 0; i < list.length; i++)
t = Math.max(list[i].time, t);
return t;
}
var uploader;
var listfile;
var date_fmt;
var options = {};
var exclude = [];
var dir_list = [];
var verbosity = 0;
for(var i = 0; i < argc; i++) {
var arg = argv[i];
if(arg[0] == '-') {
if(arg.indexOf("-ex=") == 0) {
exclude.push(arg.slice(4).toUpperCase());
continue;
}
if(arg.indexOf("-lib=") == 0) {
var lib = arg.slice(5);
if(!file_area.lib[lib]) {
alert("Library not found: " + lib);
exit(1);
}
for(var j = 0; j < file_area.lib[lib].dir_list.length; j++)
dir_list.push(file_area.lib[lib].dir_list[j].code);
options.auto = true;
continue;
}
if(arg.indexOf("-from=") == 0) {
uploader = arg.slice(6);
continue;
}
if(arg.indexOf("-date=") == 0) {
date_fmt = arg.slice(6);
options.date = true;
continue;
}
if(arg.indexOf("-fdate=") == 0) {
date_fmt = arg.slice(7);
options.fdate = true;
continue;
}
if(arg.indexOf("-adate=") == 0) {
date_fmt = arg.slice(7);
options.adate = true;
continue;
}
if(arg == '-' || arg == '-all') {
for(var dir in file_area.dir)
dir_list.push(dir);
options.auto = true;
continue;
}
if(arg[1] == 'v') {
var j = 1;
while(arg[j++] == 'v')
verbosity++;
continue;
}
options[arg.slice(1)] = true;
} else {
if(!dir_list.length)
dir_list.push(arg);
else
listfile = arg;
}
}
if(exclude.length < 1)
exclude = default_excludes;
if(listfile)
exclude.push(listfile.toUpperCase());
if(!dir_list.length) {
var code;
while(!file_area.dir[code] && !js.terminated) {
for(var d in file_area.dir)
print(d);
code = prompt("Directory code");
}
dir_list.push(code);
}
var added = 0;
var updated = 0;
for(var d = 0; d < dir_list.length; d++) {
var code = dir_list[d];
var dir = file_area.dir[code];
if(!dir) {
alert("Directory '" + code + "' does not exist in configuration");
continue;
}
if(options.auto && (dir.settings & DIR_NOAUTO))
continue;
print("Adding files to " + dir.lib_name + " " + dir.name);
var filebase = new FileBase(code);
if(!filebase.open("r")) {
alert("Failed to open: " + filebase.file);
continue;
}
var name_list = filebase.get_names();
// Convert to uppercase
for(var i = 0; i < name_list.length; i++) {
name_list[i] = name_list[i].toUpperCase();
if(options.debug)
print(name_list[i]);
}
var file_list = [];
if(listfile) {
var listpath = file_getcase(dir.path + listfile) || file_getcase(listfile);
var f = new File(listpath);
if(f.exists) {
print("Opening " + f.name);
if(!f.open('r')) {
alert("Error " + f.error + " (" + strerror(f.error) + ") opening " + f.name);
exit(1);
}
file_list = parse_file_list(f.readAll());
f.close();
} else {
alert(dir.path + file_getname(listfile) + " does not exist");
}
}
else {
var list = directory(dir.path + '*');
for(var i = 0; i < list.length; i++) {
if(!file_isdir(list[i]))
file_list.push({ name: file_getname(list[i]) });
}
}
for(var i = 0; i < file_list.length; i++) {
var file = file_list[i];
file.from = uploader;
if(options.debug)
print(JSON.stringify(file, null, 4));
else if(verbosity)
printf("%s ", file.name);
if(exclude.indexOf(file.name.toUpperCase()) >= 0) {
if(verbosity)
print("excluded (ignored)");
continue;
}
file.extdesc = lfexpand(file.extdesc);
if(verbosity > 1)
print(JSON.stringify(file));
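// Treat the file as already present if its normalized name is in this base's (uppercased) name list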
var exists = name_list.indexOf(filebase.get_name(file.name).toUpperCase()) >= 0;
if(exists && !options.update) {
if(verbosity)
print("already added");
continue;
}
var path = file_area.dir[code].path + file.name;
if(!file_exists(path)) {
alert("does not exist: " + path);
continue;
}
if(options.date)
file.desc = datestr(time()) + " " + file.desc;
else if(options.fdate)
file.desc = datestr(file_date(path)) + " " + file.desc;
else if(options.adate)
file.desc = datestr(archive_date(path)) + " " + file.desc;
file.cost = file_size(path);
if(exists) {
var hash = filebase.hash(file.name);
if(hash) {
file.size = hash.size;
file.crc16 = hash.crc16;
file.crc32 = hash.crc32;
file.md5 = hash.md5;
file.sha1 = hash.sha1;
}
if(!filebase.update(file.name, file, options.diz)) {
alert("Error " + filebase.last_error + " updating " + file.name);
} else {
print("Updated " + file.name);
updated++;
}
} else {
// Add file here:
if(!filebase.add(file, options.diz)) {
alert("Error " + filebase.last_error + " adding " + file.name);
} else {
print("Added " + file.name);
added++;
}
}
}
filebase.close();
}
print(added + " files added");
if(updated)
print(updated + " files updated");
// Parse a FILES.BBS (or similar) file listing file
// Note: file descriptions must begin with an alphabetic character
function parse_file_list(lines)
{
var file_list = [];
for(var i = 0; i < lines.length; i++) {
var line = lines[i];
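// Group 1: the filename (word chars plus - ! # .); skip separators and any non-alphabetic column (e.g. a size field); group 2: the description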
var match = line.match(/(^[\w]+[\w\-\!\#\.]*)\W+[^A-Za-z]*(.*)/);
// print('fname line match: ' + JSON.stringify(match));
if(match && match.length > 1) {
var file = { name: match[1], desc: match[2] };
if(file.desc && file.desc.length > LEN_FDESC)
file.extdesc = word_wrap(file.desc, 45);
file_list.push(file);
continue;
}
match = line.match(/\W+\|\s+(.*)/);
if(!match) {
if(verbosity)
alert("Ignoring line: " + line);
continue;
}
// print('match: ' + JSON.stringify(match));
if(match && match.length > 1 && file_list.length) {
var file = file_list[file_list.length - 1];
if(!file.extdesc)
file.extdesc = file.desc + "\n";
file.extdesc += match[1] + "\n";
var combined = file.desc + " " + match[1].trim();
if(combined.length <= LEN_FDESC)
file.desc = combined;
}
}
return file_list;
}
// Deal with archive files using Synchronet v3.19 Archive class
// Install "Viewable File Types" using 'jsexec archive.js install'
"use strict";
var cmd = argv.shift();
var fname = argv.shift();
var verbose = false;
var i = argv.indexOf('-v');
if(i >= 0) {
verbose = true;
argv.splice(i, 1);
}
switch(cmd) {
case 'list':
list(fname, verbose);
break;
case 'json':
writeln(JSON.stringify(Archive(fname).list(verbose, argv[0]), null, 4));
break;
case 'create':
print(Archive(fname).create(directory(argv[0])) + " files archived");
break;
case 'extract':
var a = Archive(fname);
print(a.extract.apply(a, argv) + " files extracted");
break;
case 'read':
print(Archive(fname).read(argv[0]));
break;
case 'type':
print(Archive(fname).type);
break;
case 'install':
install();
break;
default:
throw new Error("invalid command: " + cmd);
}
function list(filename, verbose)
{
var list;
try {
list = Archive(filename).list(verbose);
} catch(e) {
alert(file_getname(filename) + ": Unsupported archive format");
return;
}
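// Ctrl-A ("\x01") attribute codes colorize terminal output; they are stripped below when there is no console object (e.g. running under jsexec)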
var dir_fmt = "\x01n%s";
var file_fmt = "\x01n \x01c\x01h%-*s \x01n\x01c%10lu ";
if(verbose)
file_fmt += "\x01h%08lX ";
file_fmt += "\x01h\x01w%s";
if(!js.global.console) {
dir_fmt = strip_ctrl(dir_fmt);
file_fmt = strip_ctrl(file_fmt);
}
var longest_name = 0;
for(var i = 0; i < list.length; i++) {
longest_name = Math.max(longest_name, file_getname(list[i].name).length);
}
var curpath;
for(var i = 0; i < list.length && !js.terminated && (!js.global.console || !console.aborted); i++) {
if(list[i].type != "file")
continue;
else {
var fname = file_getname(list[i].name);
var path = list[i].name.slice(0, -fname.length);
if(path != curpath)
writeln(format(dir_fmt, path ? path : "[root]"));
if(verbose)
writeln(format(file_fmt
,longest_name, fname, list[i].size, list[i].crc32
,system.timestr(list[i].time).slice(4)));
else
writeln(format(file_fmt
,longest_name, fname, list[i].size
,system.timestr(list[i].time).slice(4)));
curpath = path;
}
}
}
function install()
{
var viewable_exts = [
'7z',
'exe',
'bz',
'gz',
'iso',
'lha',
'lzh',
'tbz',
'tgz',
'rar',
'xar',
'zip'
];
var cnflib = load({}, "cnflib.js");
var file_cnf = cnflib.read("file.cnf");
if(!file_cnf) {
alert("Failed to read file.cnf");
exit(-1);
}
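// Add a "Viewable File Type" entry per extension that invokes this script ('?archive list %f')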
for(var e in viewable_exts) {
file_cnf.fview.push({
extension: viewable_exts[e],
cmd: '?archive list %f'
});
}
if(!cnflib.write("file.cnf", undefined, file_cnf)) {
alert("Failed to write file.cnf");
exit(-1);
}
exit(0);
}
@@ -746,7 +746,7 @@ cmdkey J
end_cmd
cmdkey L
setstr *.*
setstr *
file_list
end_cmd
......
// List files in a Synchronet v3.19 file base directory
"use strict";
var options = { sort: false};
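// detail < 0 lists names only (via get_names); each -v raises the detail level passed to get_list()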
var detail = -1;
var dir_list = [];
var filespec = "";
var props = [];
var fmt;
for(var i = 0; i < argc; i++) {
var arg = argv[i];
if(arg[0] == '-') {
var opt = arg.slice(1);
if(opt[0] == 'v') {
var j = 0;
while(opt[j++] == 'v')
detail++;
continue;
}
if(opt.indexOf("p=") == 0) {
props.push(opt.slice(2));
continue;
}
if(opt == "json") {
fmt = "json";
continue;
}
if(opt == "arc") {
fmt = "arc";
continue;
}
if(opt.indexOf("fmt=") == 0) {
fmt = opt.slice(4);
continue;
}
if(opt == "all") {
for(var dir in file_area.dir)
dir_list.push(dir);
continue;
}
options[opt] = true;
continue;
}
if(file_area.dir[arg])
dir_list.push(arg);
else
filespec = arg;
}
if(props.length < 1)
props = ["name", "size", "from", "desc", "extdesc"];
if(!fmt) {
fmt = "%-13s %10s %-25s %s";
if(detail > 1)
fmt += "\n%s";
}
var output = [];
for(var i in dir_list) {
var dir_code = dir_list[i];
var dir = file_area.dir[dir_code];
if(!dir) {
alert("dir not found: " + dir_code);
continue;
}
if(options.hdr) {
var hdr = format("%-15s %-40s Files: %d", dir.lib_name, dir.description, dir.files);
output.push(hdr);
output.push(format("%.*s", hdr.length
, "-------------------------------------------------------------------------------"));
}
output = output.concat(listfiles(dir_code, filespec, detail, fmt, props));
}
//if(options.sort)
// output.sort();
for(var i in output)
print(output[i]);
function archive_contents(path, list)
{
var output = [];
for(var i = 0; i < list.length; i++) {
var fname = path + list[i];
print(fname);
output.push(fname);
var contents;
try {
contents = Archive(fname).list();
} catch(e) {
// alert(e);
continue;
}
for(var j = 0; j < contents.length; j++)
output.push(contents[j].name + " " + contents[j].size);
}
return output;
}
function listfiles(dir_code, filespec, detail, fmt, props)
{
var base = new FileBase(dir_code);
if(!base.open())
return base.last_error;
var output = [];
if(detail < 0) {
var list = base.get_names(filespec, options.sort);
if(fmt == 'json')
output = JSON.stringify(list, null, 4).split('\n');
else if(fmt == 'arc')
output = archive_contents(file_area.dir[dir_code].path, list);
else
output = list;
} else {
var list = base.get_list(filespec, detail, options.sort);
if(fmt == 'json')
output.push(JSON.stringify(list, null, 4));
else {
for(var i = 0; i < list.length; i ++)
output.push(list_file(list[i], fmt, props));
}
}
base.close();
return output;
}
function list_file(file, fmt, props)
{
if(typeof file == 'string') {
print(file);
return;
}
if(fmt === undefined)
fmt = "%s";
var a = [fmt];
for(var i in props) {
if(file[props[i]] === undefined)
a.push('');
else
a.push(file[props[i]]);
}
return format.apply(this, a);
}
"use strict";
if(argv.indexOf("-help") >= 0 || argv.indexOf("-?") >= 0) {
print("usage: [dir-code] [file-name]");
exit(0);
}
var code = argv[0];
while(!file_area.dir[code] && !js.terminated) {
for(var d in file_area.dir)
print(d);
code = prompt("Directory code");
}
var dir = file_area.dir[code];
var filebase = new FileBase(code);
if(!filebase.open()) {
alert("Failed to open: " + filebase.file);
exit(1);
}
print(JSON.stringify(filebase.hash(argv[1]), null, 4));
print(filebase.last_error);
/*
var name_list = filebase.get_file_names();
*/
\ No newline at end of file
@@ -3,8 +3,6 @@
// This script generates HTML documentation of the Synchronet JavaScript object model
// Requires a Debug build of the Synchronet executable(s)
// $Id: jsdocs.js,v 1.40 2020/04/20 06:31:15 rswindell Exp $
const table_tag = "<table border=1 width=100%>";
const li_tag = "<li onclick = 'this.className = (this.className == \"showList\") ? \"defaultStyles\" : \"showList\";'\n" +
@@ -320,7 +318,9 @@ if(js.global.msg_area != undefined) document_object("msg_area" ,msg_area);
if(js.global.file_area != undefined) document_object("file_area" ,file_area);
if(js.global.xtrn_area != undefined) document_object("xtrn_area" ,xtrn_area);
if(js.global.MsgBase != undefined) document_object("MsgBase" ,new MsgBase(msg_area.grp_list[0].sub_list[0].code), "class");
if(js.global.FileBase != undefined) document_object("FileBase" ,new FileBase(file_area.lib_list[0].dir_list[0].code), "class");
if(js.global.File != undefined) document_object("File" ,new File(system.devnull), "class");
if(js.global.Archive != undefined) document_object("Archive" ,new Archive(system.devnull), "class");
if(js.global.Queue != undefined) document_object("Queue" ,new Queue(), "class");
if(js.global.Socket != undefined) {
var sock=new Socket();
......
@@ -195,6 +195,8 @@ function read_netuser(username, netaddr)
function read(usernum, username, netaddr, bbsid)
{
var usernum = parseInt(usernum, 10);
if(!usernum && !username)
return false;
var obj = cache_get(usernum >= 1 ? usernum : username, netaddr);
if(obj !== undefined) // null and false are also valid cached avatar values
return obj;
......
@@ -82,9 +82,7 @@ function TickITCfg(fname) {
var dir = file_area.di