#!/usr/bin/perl
#
# filechucker.cgi
#
# This program is the copyrighted work of Encodable Industries.
# Redistribution is prohibited, and copies are permitted only for
# backup purposes. You are free to modify the program for your
# own use, but you may not distribute any modified copies of it.
#
# Use of this program requires a one-time license fee. You can
# obtain a license here:
#
# http://encodable.com/filechucker/#license
#
# This software comes with no warranty. The author and many other
# people have found it to be useful, and it is our hope that you
# find it useful as well, but it comes with no guarantees. Under
# no circumstances shall Encodable Industries be held liable in
# any situation arising from your use of this program. We are
# generally happy to provide support to all our users, but we can
# make no guarantee of support.
#
# For more information about this program, as well as for help
# and support, please visit the following pages:
#
# Homepage: http://encodable.com/filechucker/
# Contact: http://encodable.com/contact/

$ENV{PATH} = '/bin:/usr/bin';
delete @ENV{'IFS', 'CDPATH', 'ENV', 'BASH_ENV'};
($ENV{DOCUMENT_ROOT}) = ($ENV{DOCUMENT_ROOT} =~ /(.*)/); # untaint.

my %PREF = ();

###
### User preferences section: adjust these variables
### to suit your own server/setup/tastes.
###

############################################################################
# Title appearing at the top of the page.
#
$PREF{title} = "FileChucker";

############################################################################
# Text appearing below the title, before the upload form.
#
$PREF{intro} = qq`
<p>Upload some files!</p>
`; ############################################################################ # Choose whether the script should display a link to the list of # uploaded files. Note that "members" includes "admins" and # "strangers" includes everyone (i.e. members and admins too). # $PREF{show_link_to_uploads_for_strangers} = 'yes'; $PREF{show_link_to_uploads_for_members} = 'yes'; $PREF{show_link_to_uploads_for_admins} = 'yes'; ############################################################################ # Allow the user to upload multiple files at the same time, up # to this limit. # $PREF{max_files_allowed} = 10; ############################################################################ ### ### The rest of the PREFs are optional and/or unnecessary to ### change most of the time. Try to get the script running ### without changing them first, then adjust them if you want. ### ############################################################################ # Set the maximum size file that can be uploaded. One megabyte # is 1024*1024*1; 5 MB is 1024*1024*5, etc. # $PREF{sizelimit} = 1024*1024*20; ############################################################################ # You can allow or disallow uploads based on file extension. Each of these # two preferences takes a list of extensions separated by commas and/or # spaces. Note that the period must be included with the extension. For # example: # # $PREF{only_allow_these_file_extensions} = '.jpg, .png, .gif'; # # Finally, note that these are not case sensitive. # $PREF{only_allow_these_file_extensions} = ''; $PREF{disallow_these_file_extensions} = '.php .php3 .php4 .php5 .cgi .pl .sh .py .htaccess .htpasswd'; $PREF{allow_files_without_extensions} = 'yes'; ############################################################################ # In both filenames and directory names, convert spaces to underscores, # and remove anything that's not in the set [0-9A-Za-z._-]. # $PREF{clean_up_filenames} = 'no'; ############################################################################ # By default, if you upload a file that's 1 megabyte or bigger, # the file sizes and upload rate will be in MB and MB/s. If # you want to force them to always be in KB instead, set these. # $PREF{force_KB_for_size_display} = 'no'; $PREF{force_KB_for_transfer_rate_display} = 'yes'; ############################################################################ # Options for controlling the display of the uploaded-files list. # You can set the unit, number of decimal places, and the color # that the rows turn to when you hover the mouse over them. You # can also specify how the date should be displayed. Google for # "unix man pages: date (1)" (or just run "man date" on any Unix # system) for more information on the date format. # $PREF{unit_for_size_display_in_uploaded_files_list} = 'KB'; # can be KB, MB, or mixed. $PREF{num_decimals_for_uploaded_files_list_sizes} = 0; $PREF{display_shortened_filename_if_longer_than} = 40; # characters. 
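# For example, an illustrative (not default) file-list display setup:
#
#   $PREF{unit_for_size_display_in_uploaded_files_list} = 'mixed';
#   $PREF{num_decimals_for_uploaded_files_list_sizes} = 1;
#
# With those values, a 2,621,440-byte upload would be listed as "2.5 MB"
# and a 51,200-byte upload as "50.0 KB".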
$PREF{filelist_row_hover_bgcolor} = '#e9e9e9'; $PREF{filelist_row_normal_bgcolor} = 'transparent'; $PREF{date_format_for_filelist} = '%b%d, %I:%M%P'; #$PREF{date_format_for_filelist} = '%Y-%m-%d, %I:%M %P'; #$PREF{date_format_for_filelist} = '%a %b %d, %Y, %H:%M'; #$PREF{date_format_for_filelist} = '%I:%M %P %b %d %Y'; $PREF{only_show_files_with_these_extensions} = ''; $PREF{hide_files_with_these_extensions} = '.php .php3 .php4 .php5 .cgi .pl .sh .py .htaccess .htpasswd'; ############################################################################ # By default, when an upload begins, the progress bar pops out of nowhere # and displays below the "Begin Upload" button. If you want, you can set # this preference so that it clears the rest of the page first, so that the # progress bar & table are the only thing on the screen during the upload. # $PREF{clear_page_during_upload} = 'yes'; ############################################################################ # Password hashes (optional): if you want to require a password # for access to the uploader and/or the list of uploaded files, # you need to set these. Go to: # # yoursite.com/cgi-bin/upload/filechucker.cgi?makePasswordHash # # ...enter the password you want to use into that page, and it # will generate a "hash" of the password, which is a string that # looks something like this: # # cdfc81932491375c34c842bcebc7dc15 # # Copy and paste the hash into one of the following preferences. # Then when you want to log in, enter the password, not the hash. # (This is so that we don't store the actual password on disk, which # would be very insecure.) # # We specify two possible user-levels: member and admin. If you # want, you can use just one of them, and have a single password # for both uploading and viewing the file-list. Or you can specify # both, and set the "must_be_" preferences accordingly, so that only # the admin can view the uploaded files. Or vice-versa. Or you # could require no password to view the file-list, but require one # to upload. Etc, etc. Just set the prefs accordingly. # # Note that the admin is automatically a "member" too, so someone # with the admin password automatically has access to anything that # requires the member password. # # Finally, note that to delete uploaded files, you must be logged # in as admin. So you probably at least want to create the admin # password hash, even if you don't set any of the upload/list prefs # to yes. # $PREF{member_password_hash} = ''; $PREF{admin_password_hash} = '2bac61f75416e291f332dbf88e63c5db'; $PREF{must_be_member_to_upload} = 'no'; $PREF{must_be_admin_to_upload} = 'no'; $PREF{must_be_member_to_list_files} = 'no'; $PREF{must_be_admin_to_list_files} = 'no'; $PREF{must_be_member_to_view_upload_info} = 'no'; $PREF{must_be_admin_to_view_upload_info} = 'no'; $PREF{must_be_member_to_move_items} = 'no'; $PREF{must_be_admin_to_move_items} = 'no'; $PREF{must_be_member_to_make_folder_thru_fileman} = 'no'; $PREF{must_be_admin_to_make_folder_thru_fileman} = 'no'; ############################################################################ # Once you allow someone to download a file from your uploads area, # they will know the path to all your uploads. If you don't want # them to be able to see all the other files by just visiting that # directory's address, you'll need to put a .htaccess file in that # directory with the line "Options -Indexes" (without quotes). # However, as long as they know the address, they can still try to # guess filenames that might be in there. 
As an extra security # precaution, you can set serialize_all_uploads, which adds a long # pseudo-random number to each filename, making it virtually # impossible that someone could guess the name of a file in the # directory. # $PREF{serialize_all_uploads} = 'no'; ############################################################################ # Even if you don't want to serialize ALL uploads, it can be useful to # automatically serialize an upload if a file with the same name already # exists in the uploads directory. If you'd rather have files get over- # written when a new file with the same name is uploaded, then set this. # $PREF{overwrite_existing_files} = 'no'; ############################################################################ # You can have a datestamp (YYYYMMDD-HHMM) added to the filename for # each upload. It will be added to the end, right before the extension. # $PREF{datestamp_all_uploads} = 'no'; ############################################################################ # FileChucker can automatically rename every file that gets uploaded, # based on a format that you specify here. Note that for this PREF, # the part after the equal sign must be enclosed in single-quotes # (most of the PREFs are that way anyway, even though double-quotes # would work too; but for this one they MUST be single-quotes). # # You can use the following variables in your filename formatting: # # $o - original filename from the user's computer (without extension) # # $e - extension from original filename (without leading period) # # $u - name of user's private directory (null unless you've # enabled the userdir PREFs below) # # $n - sequence number of this file within its original upload # (set to 1 if only a single file was uploaded) # # You can also use variables of the form %v to insert date/time # values based on the standard date formatting variables; Google for # "unix man pages: date (1)" (or just run "man date" on any Unix # system) for more information on the date format. # # Finally, you can include variables from either the URL or from a # cookie by using $URL{varname} or $COOKIE{cookiename}. # # And anything not preceded by a dollar-sign or a percent-sign will be # passed straight through as literal text. # # For example, if you set: # # $PREF{reformat_filenames_for_all_uploads} = '%Y%m%d-$u_$n-$COOKIE{foo}-$o.$e'; # # ...and then a user uploads 3 files, on say Feb 28 2006, then they # will be renamed to: # # 20060228-userdirname_1-fooCookieValue-originalfilename.ext # 20060228-userdirname_2-fooCookieValue-originalfilename.ext # 20060228-userdirname_3-fooCookieValue-originalfilename.ext # # Finally note that to disable this, you must comment it out or set # it to null (''). # $PREF{reformat_filenames_for_all_uploads} = ''; ############################################################################ # Create an info file for each upload, containing info like the uploader's # IP/hostname/user-agent, timestamps, elapsed time, etc. # $PREF{store_info_about_each_upload} = 'yes'; ############################################################################ # Allow the uploader to enter comments about the file(s) they are # uploading. There will be one comments box per file. Note that # this requires you to enable the store_info_about_each_upload PREF. # $PREF{display_comments_box_for_uploads} = 'no'; $PREF{comments_box_intro} = 'Enter any comments/info about this file:'; $PREF{comments_box_size} = '350x45'; # in pixels. 
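# As a further illustration of the reformat_filenames_for_all_uploads
# format described above (illustrative value, not a default):
#
#   $PREF{reformat_filenames_for_all_uploads} = '$o-%Y%m%d-%H%M.$e';
#
# With that format, a file named photo.jpg uploaded on Feb 28 2006 at
# 14:05 would be saved as photo-20060228-1405.jpg.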
############################################################################ # Some servers seem to not set $ENV{DOCUMENT_ROOT} properly (in one case, # for users who serve pages from their home directories), so we'll make # our own version. Most of the time it should be set to the doc-root, # but if necessary you can adjust this here. # $PREF{DOCROOT} = $ENV{DOCUMENT_ROOT}; ############################################################################ # This is where the uploaded files will go. It must be world-readable # and world-writable, aka "chmod a+rwx" or "chmod 0777". Set this to # "/dev/null" if you want the files to not be saved at all. # $PREF{uploaded_files_dir} = '/uploads'; ############################################################################ # The previous 'uploaded_files_dir' preference can specify one of three # types of directories: # # - absolute: for example /home/anthony/uploaded_files/ on Unix systems, # or c:\uploaded_files on Windows systems. # # - absolute_within_docroot: if you set uploaded_files_dir to '/uploaded_files' # and your DOCROOT is /var/www then it will be at /var/www/uploaded_files. # Or if your DOCROOT is c:\inetpub\wwwroot then it will be at # c:\inetpub\wwwroot\uploaded_files. # # - relative: the directory will be located in the same place as this script, # so if you put the script at /var/www/cgi-bin/uploads/filechucker.cgi then # the directory will be at /var/www/cgi-bin/uploads/uploaded_files. # $PREF{uploaded_files_dir_type} = 'absolute_within_docroot'; ############################################################################ # If you set uploaded_files_dir_type to absolute, then we won't be able to # link to the files from your website, because they aren't in the DOCROOT. # That's probably not what you wanted, but if it really is, and you're doing # something like manually moving the files to somewhere else within your # DOCROOT later, then enter that path here. Otherwise just leave this # commented out. # #$PREF{uploaded_files_urlpath} = ''; ############################################################################ # This is where logfiles are stored. This must be world-readable and # world-writable. But if you're going to enable the database-backend option # (see below), then you can skip the logpath PREFs. # $PREF{logpath} = 'logs'; ############################################################################ # The previous 'logpath' preference can specify one of three # types of directories: # # - absolute: for example /home/anthony/logs/ on Unix systems, # or c:\logs on Windows systems. # # - absolute_within_docroot: if you set logpath to '/logs' # and your DOCROOT is /var/www then it will be at /var/www/logs. # Or if your DOCROOT is c:\inetpub\wwwroot then it will be at # c:\inetpub\wwwroot\logs. # # - relative: the directory will be located in the same place as this script, # so if you put the script at /var/www/cgi-bin/uploads/filechucker.cgi then # the directory will be at /var/www/cgi-bin/uploads/logs. # $PREF{logpath_type} = 'relative'; ############################################################################ # The logfiles are just used during each upload, to keep track of # how much data has been sent, how much time has elapsed, etc. # They aren't used at all after an upload has completed, so you # probably want them deleted right away. Note that these are # different from the info-files which tell you who uploaded what. 
# $PREF{delete_logfiles_immediately} = 'yes'; ############################################################################ # If you're using SSI: # () # or a PHP include: # () # to display this script at a shorter URL (like mysite.com/upload/ instead # of mysite.com/cgi-bin/upload/filechucker.cgi) then enter that shorter URL # here. Otherwise leave it set to $ENV{SCRIPT_NAME}. # $PREF{here} = $ENV{SCRIPT_NAME}; ############################################################################ # You can have the script send you an email whenever a new upload happens. # It can be sent to as many recipients as you want. Most of the email PREFs # are fairly self-explanatory, but here are a few notes: the sender address # doesn't have to be a real address, but it DOES have to end with a real # domain name. The smtp server is probably localhost or mail.yoursite.com, # and we'll try both smtp and sendmail when trying to send an email. The # email type can be either html or text, and the failure action can be # either die_on_email_error or null, in which case we'll just ignore the # error. # $PREF{send_email_notifications} = 'no'; $PREF{email_notification_recipient_1} = 'me@mysite.com'; $PREF{sender_email_address} = 'filechucker@mysite.com'; $PREF{smtp_server} = 'localhost'; $PREF{path_to_sendmail} = '/usr/sbin/sendmail'; $PREF{email_type} = 'html'; $PREF{email_subject} = "New upload on mysite.com"; $PREF{email_failure_action} = 'die_on_email_error'; ############################################################################ # This prints some debugging output in an HTML comment at the bottom of the # page, if you pass "debug" as the query-string. Don't enable it unless # you can't get the script to work and you want to try and figure out why. # $PREF{enable_debug} = 'no'; ############################################################################ # Enable this when you're first trying to get the script running; disable # it after the script is working properly on your server. # $PREF{show_errors_in_browser} = 'yes'; ############################################################################ # By default, if your server's version of the CGI.pm module is >= 3.03, # then we'll use its upload hook. If it's older than 3.03, we'll do it # manually, since older versions don't support the upload hook. This # means we can't show the counts for number of files completed/remaining, # but if your server has an ancient version of the CGI.pm module, then # you have no choice. Anyway, this PREF is in case the script isn't # working for you, even though you DO have v3.03 or newer. Setting this # to 'yes' will force the manual behavior, and the only loss will be the # aforementioned files-completed/files-remaining; the time and size will # still display properly. # $PREF{disable_upload_hook} = 'no'; ############################################################################ # FileChucker can use a database backend instead of diskfiles to store # its temporary working data, if you'd like. There's really no benefit # of one method over the other (and on servers that perform write-caching, # thus preventing the progress bar from working properly, the database # backend doesn't actually work around the problem, as we had thought it # might). # $PREF{use_database_backend} = 'no'; ############################################################################ # The database name can optionally also have a :hostname after the # database-name. 
# Here are two commented-out examples for reference:
#
# $PREF{dbname} = 'mydbname';
# $PREF{dbname} = 'mydbname:mydbhost';
#
#$PREF{dbname} = 'mydbname:mydbhost';

############################################################################
# Your database username and password. It needs privileges for INSERT,
# UPDATE, SELECT, DELETE, and CREATE. If you're paranoid and don't want
# to give it CREATE privs, then see the create_db_table() function to find
# out the column types, so you can create the table manually.
#
$PREF{dbuser} = '';
$PREF{dbpass} = '';

############################################################################
# The name of our table in your database.
#
$PREF{enc_uploader_table_name} = 'enc_upload_info';

############################################################################
# You probably don't want the database entries filling up your DB since
# they don't actually contain any useful information after the upload is
# complete. But you can disable this if for some reason you want them kept.
# Note that this doesn't delete the uploaded files; they aren't stored in
# the database.
#
$PREF{delete_database_entries} = 'yes';

############################################################################
# Allow users to upload to subdirectories if they want.
#
$PREF{enable_subdirs} = 'yes';

############################################################################
# Allow users to create new subdirectories (but only within your
# uploaded_files_dir, of course).
#
$PREF{enable_new_subdir_creation} = 'yes';
$PREF{max_num_of_subdir_levels} = 4;
$PREF{max_length_of_new_subdir_names} = 25;

############################################################################
# Only allow each user to access their own upload directory. The user's
# directory is determined by the preferences you set below; it will either
# come from ?userdir=foo on the URL, or from a username cookie that's set
# by your site's existing login framework. If you don't want any kind of
# username-based directories, and you want all users to be able to upload
# to wherever they want, then don't set this.
#
$PREF{enable_username_based_subdirs} = 'no';

############################################################################
# If you're dropping FileChucker into an existing system with lots of
# users, you may not want to manually create a user subdirectory for each
# one. In that case you can enable this, and then anyone who visits with
# ?userdir=foo or with foo in their username cookie will cause the username
# directory to be created automatically.
#
$PREF{auto_create_userdirs} = 'no';

############################################################################
# Enable ?userdir=username (or ?userdir=whatever) on the URL to automatically
# select the upload subdirectory. Note that this is probably insecure and
# you should use enable_userdir_from_cookie instead.
#
$PREF{enable_userdir_on_url} = 'no';

############################################################################
# If you want to use ?userdir=username (or ?userdir=whatever) on the URL, to
# automatically select the directory into which the user's files will be
# uploaded, then you may also want to set url_without_userdir_is_error, so
# that sneaky users can't try to manually manipulate the URL and remove the
# username, gaining access to all the other users' upload directories.
# $PREF{url_without_userdir_is_error} = 'yes'; ############################################################################ # If you want per-username upload subdirectories, and you already have some # kind of login framework in place on your site that stores usernames in # cookies, then you can use this to automatically choose the right subdir # based on the value in the cookie. This means that each user will only # be able to view and upload to his own subdirectory within your # uploaded_files_dir. # $PREF{enable_userdir_from_cookie} = 'no'; $PREF{userdir_cookie_name} = 'username'; ############################################################################ # After a successful upload, we normally display a page that says # "upload complete" and lists the uploaded files, their sizes, etc. # If you want, you can redirect to some other page instead. This # value MUST start with http:// or https:// and it MUST be enclosed # in single-quotes, not double-quotes. Note that you can include # variables in the value from either the URL or from a cookie by # using $URL{varname} or $COOKIE{cookiename}. For example: # # $PREF{after_upload_redirect_to} = 'http://mysite.com/foo/bar/?baz=$URL{baz}&bim=$COOKIE{bim}'; # $PREF{after_upload_redirect_to} = ''; ############################################################################ ### ### End of user preferences section. You probably don't want to mess with ### anything below here unless you really know what you're doing. ### ############################################################################ my $version = "2.55t"; if($ENV{QUERY_STRING} eq 'version') { print "Content-type: text/plain\n\n"; print "$version\n"; exit; } my ($cwd) = ($ENV{SCRIPT_FILENAME} =~ m!^(.+)/.*?$!); chdir $cwd; $| = 1; use strict; #use warnings; if($PREF{show_errors_in_browser} =~ /yes/i) { use CGI::Carp 'fatalsToBrowser'; } use Fcntl; use CGI; use POSIX; use CGI qw/:standard/; eval { require DBI; }; die "$0: $@\n" if $@ && $PREF{use_database_backend} =~ /yes/i; eval { require IO::Socket; }; die "$0: $@\n" if $@ && $PREF{use_ipc_backend} =~ /yes/i; my $qs = $ENV{QUERY_STRING}; load_prefs(); my $output_started = 0; my $starttime = time; my $total_upload_size = (); my $errorlogfh = (); my %temp = (); my $num_files_in_progress_or_done = 0; my $total_file_count = $qs =~ /(?:^|&)items=(\d+)(?:&|$)/ ? $1 : 1; my $dbh = $PREF{use_database_backend} =~ /yes/i ? get_db_connection() : ''; my $ampm = lc(strftime("%p", localtime(time))); my $shortdatetime = strftime("%a%b%d,%Y,%I:%M", localtime(time)).$ampm; my $shortdatetime_forfilename = strftime("%a%b%d,%Y,%Hh%Mm%Ss", localtime(time)).$ampm; my $datestring8 = strftime("%Y%m%d", localtime(time)); $CGI::POST_MAX = $PREF{sizelimit} =~ /^\d+$/ ? 
$PREF{sizelimit} : 1024 * 1024 * 3; my (%DLOG, %TOLDPROGRESSSERVER, $dlog) = (); #open($dlog, ">>logs/fcdebug.log") or die "$0: couldn't open debug log for appending: $!\n"; if($qs =~ /serial=(\d+)&action=get_progress_and_size/) { print "Cache-Control: no-store, no-cache\n"; print "Content-type: text/xml\n\n"; my ($progress,$currentfile,$totalfiles,$size,$elapsedtime) = get_progress_and_size($1); if($progress eq 'ENOLOG') { print "ERROR: the log file hasn't been created yet; probably your server is doing some write-caching so the log doesn't get created when we create it -- it actually gets created AFTER the upload is complete, making progress reporting impossible."; exit; } elsif($progress eq 'ENORAWPOST') { print "ERROR: the rawpost file hasn't been created yet; probably your server is doing some write-caching so the file doesn't get created when we create it -- it actually gets created AFTER the upload is complete, making progress reporting impossible."; exit; } my $toobig = $size > $CGI::POST_MAX ? '|toobig' : ''; my $finished_file_count = $currentfile ? $currentfile - 1 : 0; my $output = "$progress|$size|$elapsedtime|$finished_file_count|$total_file_count" . $toobig; print $output; } elsif($qs =~ /(?:^|&)action=listfiles(?:&|$)/) { list_uploaded_files(); } elsif($qs eq 'makePasswordHash') { make_password_hash(); } elsif($qs =~ /(?:^login$|action=login&target=(.+?)(&|$))/) { do_login($1); } elsif($qs eq 'logout') { do_logout(); } #elsif($qs eq 'get_logo') #{ # my $logo = get_logo(); # print "Content-type: image/png\n\n"; # print `uudecode -o /dev/stdout $logo`; #} elsif($qs =~ /action=delete(?:&path=(.*?))?&(file|dir)=(.+?)(&really=yes)?(?:&|$)/) { delete_item($1,$2,$3,$4); } elsif($qs =~ /action=(move|rename)&(file|folder)=(.+?)&src=(.*?)(?:&dst=(.+?))?(?:&|$)/) { move_item($1,$2,$3,$4,$5); } elsif($qs =~ /action=fileinfo&path=(.*?)&file=(.+?)(?:&|$)/) { show_fileinfo($1, $2); } elsif($qs =~ /action=mkdir(?:&path=(.+?)(?:&dirname=(.+))?)?(?:&|$)/) { make_dir($1,$2); } elsif($ENV{REQUEST_METHOD} =~ /post/i) { process_upload(); } else { print_new_upload_form(); } sub print_new_upload_form() { do_authentication('upload','redirect'); start_html_output('Upload a file', 'css', 'js'); my $numitems = $qs =~ /(?:^|&)items=(\d+)(?:&|$)/ ? $1 : 1; $numitems = 1 if $numitems > $PREF{max_files_allowed}; print qq`
`; print qq`
$PREF{intro}
\n\n` if $PREF{intro}; print qq`
Number of files to upload:    
`; my @dirs = sort { lc($a) cmp lc($b) } ( get_all_subdirs($PREF{uploaded_files_realpath}) ); for(my $i=1; $i<=$numitems; $i++) { my $row = ($i % 2) ? 'odd' : 'even'; print qq`
File $i of $numitems:
\n`; if($PREF{enable_subdirs} =~ /yes/i) { print qq`
Upload to:
\n\n`; if($PREF{enable_new_subdir_creation} =~ /yes/i) { print qq`
New subdirectory? Name:
\n
(will be created inside the selected directory)
\n`; } if($PREF{display_comments_box_for_uploads} =~ /yes/i && $PREF{store_info_about_each_upload} =~ /yes/i) { print qq`
$PREF{comments_box_intro}
\n`; } } print qq`
\n\n\n`; } print qq`
uploading data at: ?
FilesSizeTime
Total $total_file_count ? ??:??:??
Completed 0 0 00:00:00
Remaining $total_file_count ? ??:??:??
`; print_footer_links('home', 'list', 'logout', 'login'); print_powered_by(); finish_html_output(); } sub hook { my ($current_filename, $buffer, $bytes_read, $logfh) = @_; my $current_file_has_been_logged = 0; my ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = (); my $serial = $PREF{serial}; my @logcontents = (); # We're still the original process that's accepting the upload, so # we don't need to ask the backend for this now, we can store it # in a hash for easier retrieval: # $progress = $PREF{uploaddata}{$serial}{progress}; $currentfile = $PREF{uploaddata}{$serial}{currentfile}; $totalfiles = $PREF{uploaddata}{$serial}{totalfiles}; $totalsize = $PREF{uploaddata}{$serial}{totalsize}; $start_time = $PREF{uploaddata}{$serial}{start_time}; # There are three possibilities here: # # 1. $current_filename has already been logged (i.e. it's in @allfiles) # and its size has either gone up, or stayed the same; # # 2. $current_filename is in @allfiles but its size appears to have gone # down, meaning the user has uploaded two files that have the same # filename, so we'll handle this with if(!$current_file_has_been_logged); # # 3. $current_filename is NOT in @allfiles, which we'll also handle with # if(!$current_filename_has_been_logged). my $new_progress = (); my (@allfiles) = split(m!///!, $progress); for(@allfiles) { if(/(.+)=(\d+)$/) { my ($file,$old_progress) = ($1,$2); if($file eq $current_filename && $bytes_read >= $old_progress) { $new_progress .= "${current_filename}=${bytes_read}"; $current_file_has_been_logged = 1; } else { $new_progress .= "${file}=${old_progress}"; } $new_progress .= "///"; } } if(!$current_file_has_been_logged) { unless(!$current_filename || $bytes_read !~ /^\d+$/) { $new_progress .= "${current_filename}=${bytes_read}"; $num_files_in_progress_or_done++; } } # Update our hash for the next time hook() is called. We'll still update # the backend below, so the client can get the info too. # $PREF{uploaddata}{$serial}{progress} = $new_progress; $PREF{uploaddata}{$serial}{currentfile} = $num_files_in_progress_or_done; $PREF{uploaddata}{$serial}{totalfiles} = $total_file_count; $PREF{uploaddata}{$serial}{totalsize} = $total_upload_size; $PREF{uploaddata}{$serial}{start_time} = $starttime; if($PREF{use_database_backend} =~ /yes/i) { my $sth = $dbh->prepare("UPDATE $PREF{enc_uploader_table_name} SET progress='$new_progress', currentfile='$num_files_in_progress_or_done', totalfiles='$total_file_count', totalsize='$total_upload_size', start_time='$starttime' WHERE serial='$PREF{serial}';"); $sth->execute or die "$0: $DBI::errstr\n"; } elsif($PREF{use_ipc_backend} =~ /yes/i) { #print STDERR "hook(): new_progress: $new_progress\n"; #print $progress_server_fh "$new_progress,$num_files_in_progress_or_done,$total_file_count,$total_upload_size,$starttime\n"; my $timenow = time; unless($TOLDPROGRESSSERVER{$timenow}) { # throttle this to once per second to give the ask_progress_server() a chance to get in there. $TOLDPROGRESSSERVER{$timenow} = 1; tell_progress_server($serial, "$new_progress,$num_files_in_progress_or_done,$total_file_count,$total_upload_size,$starttime"); } } elsif($PREF{use_single_log_backend} =~ /yes/i) { my $updated_line = "${serial}:|:${new_progress}:|:${num_files_in_progress_or_done}:|:${total_file_count}:|:${total_upload_size}:|:${starttime}\n"; #print STDERR "hook(): writing updated line: $updated_line"; seek $logfh, 0, 0; # seek to the beginning again, before we start writing. 
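# Single-log backend: each active upload has one record in the shared log,
# formatted as serial:|:progress:|:currentfile:|:totalfiles:|:totalsize:|:start_time,
# where progress is a ///-separated list of filename=bytes_received pairs.
# Write back any other uploads' records we collected (@logcontents), then
# write this upload's updated record.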
print $logfh @logcontents; print $logfh $updated_line; truncate $logfh, tell $logfh; # truncate the file (on the off chance that the new size is less than the old) flock $logfh, 8; # release the lock } else { seek $logfh, 0, 0; # seek to the beginning again, before we start writing. print $logfh "${new_progress}:|:${num_files_in_progress_or_done}:|:${total_file_count}:|:${total_upload_size}:|:${starttime}\n"; # print the static info truncate $logfh, tell $logfh; # truncate the file (on the off chance that the new size is less than the old) flock $logfh, 8; # release the lock } } sub enc_untaint { my $item = shift; my $original_item = $item; my $keep_path = shift; #print STDERR "enc_untaint($item)\n"; # Regardless of whether we're keeping the path, dots surrounded by slashes are never allowed. # #$item =~ s!(^|/|\\)\.+(/|\\|$)!$1!g; while($item =~ m!((?:^|/|\\)\.+(?:/|\\|$))!) { $item =~ s!$1!/!; } #print STDERR "removed slashdots: $item\n"; if( $item =~ m!(/|\\)! && !$keep_path) { $item =~ s!^.*[/\\]+([^/\\]+)!$1!; # remove any path from the front. #print STDERR "removed path from front: $item\n"; $item =~ s!^([^/\\]+)[/\\]+!$1!; # ...and the back. } $item =~ s![`\*\?\|<>]!!g; # remove some other potentially-unsafe stuff. $item =~ s![/\\]{2,}!/!g; # condense any multiples. ($item) = ($item =~ /(.*)/); # untaint. # In case anything slips through, die as a security precaution. # die qq`$0: couldn't untaint "$original_item".\n` if $item =~ m![/\\]! && !$keep_path; die qq`$0: couldn't untaint "$original_item".\n` if $item =~ m!(?:^|/|\\)\.+(?:/|\\|$)!; die qq`$0: couldn't untaint "$original_item".\n` if $item =~ m!^\.+$!; die qq`$0: couldn't untaint "$original_item".\n` if $item =~ m!^\s*$!; #print STDERR "untainted: $item\n\n"; return $item; } sub get_progress_and_size { printd(qq`starting get_progress_and_size()\n`); do_authentication('progress','redirect'); my $serial = shift; $serial = enc_untaint($serial); my ($progress,$currentfile,$totalfiles,$totalprogress,$totalsize,$start_time,$elapsedtime) = (); if($PREF{using_upload_hook}) { if($PREF{use_database_backend} =~ /yes/i) { my $sth = $dbh->prepare("SELECT progress,currentfile,totalfiles,totalsize,start_time FROM $PREF{enc_uploader_table_name} WHERE serial='$serial';"); $sth->execute; ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = $sth->fetchrow; } elsif($PREF{use_ipc_backend} =~ /yes/i) { ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = ask_progress_server($serial); } elsif($PREF{use_single_log_backend} =~ /yes/i) { my $logfile = "$PREF{logpath}/log.log"; open(READLOGFILE,"<$logfile") or die "$0: couldn't open $logfile for reading: $!\n"; flock READLOGFILE, 1; seek READLOGFILE, 0, 0; while() { chomp; if(/^${serial}:|:(\d+):|:(\d+):|:(\d+):|:(\d+):|:(\d+)$/) { ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = ($1,$2,$3,$4,$5); last; } } close READLOGFILE or die "$0: couldn't close $logfile after reading: $!\n"; } else { my $logfile = "$PREF{logpath}/$serial.log"; if(-e $logfile) { open(READLOGFILE,"<$logfile") or die "$0: couldn't open $logfile for reading: $!\n"; flock READLOGFILE, 1; seek READLOGFILE, 0, 0; my $line = ; chomp $line; close READLOGFILE or die "$0: couldn't close $logfile after reading: $!\n"; ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = split(/:\|:/, $line); } else { return 'ENOLOG'; } } my (@allfiles) = split(m!///!, $progress); for(@allfiles) { my ($file,$progress) = (/(.+)=(\d+)$/); $totalprogress += $progress; } $elapsedtime = time - $start_time; } else { # If 
we're not using the upload hook from CGI.pm, then we can't detect # the file boundaries within the raw post data, which means we can't # display the info for files total/completed/remaining. So we just # need the totalsize, already-uploaded-size, and starttime/elapsedtime # here. if($PREF{use_database_backend} =~ /yes/i) { my $sth = $dbh->prepare("SELECT progress,currentfile,totalfiles,totalsize,start_time FROM $PREF{enc_uploader_table_name} WHERE serial='$serial';"); $sth->execute; ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = $sth->fetchrow; ($totalprogress) = ($progress =~ /.+=(\d+)/); } elsif($PREF{use_ipc_backend} =~ /yes/i) { ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = ask_progress_server($serial); ($totalprogress) = ($progress =~ /.+=(\d+)/); } elsif($PREF{use_single_log_backend} =~ /yes/i) { my $logfile = "$PREF{logpath}/log.log"; open(READLOGFILE,"<$logfile") or die "$0: couldn't open $logfile for reading: $!\n"; flock READLOGFILE, 1; seek READLOGFILE, 0, 0; while() { chomp; if(/^${serial}:|:(\d+):|:(\d+):|:(\d+):|:(\d+):|:(\d+)$/) { ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = ($1,$2,$3,$4,$5); last; } } close READLOGFILE or die "$0: couldn't close $logfile after reading: $!\n"; ($totalprogress) = ($progress =~ /.+=(\d+)/); } else { opendir(my $dirh, $PREF{logpath}) or die "$0: couldn't read directory $PREF{logpath}: $!\n"; my (@rawposts) = grep { /^$serial\.CL_\d+\.ST_\d+\.rawpost$/ } readdir($dirh); close $dirh or warn "$0: couldn't close directory $PREF{logpath}: $!\n"; #FIXME: why doesn't this close properly? my $rawpost = $rawposts[0]; return 'ENORAWPOST' unless -e "$PREF{logpath}/$rawpost"; ($totalsize,$start_time) = ($rawpost =~ /^$serial\.CL_(\d+)\.ST_(\d+)\.rawpost$/); $totalprogress = -s "$PREF{logpath}/$rawpost"; } $elapsedtime = time - $start_time; ($currentfile,$totalfiles) = (1,1); } return ($totalprogress,$currentfile,$totalfiles,$totalsize,$elapsedtime); } sub tainted { return ! eval { eval("#" . substr(join("", @_), 0, 0)); 1 }; } sub process_upload() { printd( qq`010: starting process_upload()` ); do_authentication('upload','redirect'); ($PREF{serial}) = ($qs =~ /(?:^|&)serial=(\d+)(?:&|$)/); $PREF{serial} = enc_untaint($PREF{serial}); my $serial = $PREF{serial}; $total_upload_size = $ENV{CONTENT_LENGTH}; my ($logfile,$logfh) = (); # We'll use this hash in the main/parent/original-getting-POSTed-to process, # so we never need to read from the backend, only write to it. # $PREF{uploaddata}{$serial}{progress} = 0; $PREF{uploaddata}{$serial}{currentfile} = $num_files_in_progress_or_done; $PREF{uploaddata}{$serial}{totalfiles} = $total_file_count; $PREF{uploaddata}{$serial}{totalsize} = $total_upload_size; $PREF{uploaddata}{$serial}{start_time} = $starttime; if($PREF{use_database_backend} =~ /yes/i) { my $sth = $dbh->prepare("INSERT INTO $PREF{enc_uploader_table_name} (serial,progress,currentfile,totalfiles,totalsize,start_time) VALUES('$PREF{serial}', '0', '$num_files_in_progress_or_done', '$total_file_count', '$total_upload_size', '$starttime');"); $sth->execute or die "$0: $DBI::errstr\n"; } elsif($PREF{use_ipc_backend} =~ /yes/i) { spawn_progress_server(); tell_progress_server($serial,"0,$num_files_in_progress_or_done,$total_file_count,$total_upload_size,$starttime"); } elsif($PREF{use_single_log_backend} =~ /yes/i) { $logfile = "$PREF{logpath}/log.log"; printd( qq`011: about to sysopen() logfile $logfile` ); # create the file if necessary. 
open($logfh, ">$logfile") or die "$0: couldn't create logfile $logfile for R/W: $!\n"; close $logfh or die "$0: couldn't write new (empty) file $logfile to disk: $!\n"; open($logfh, "+<$logfile") or die "$0: couldn't create logfile $logfile for R/W: $!\n"; select((select($logfh), $| = 1)[0]); flock $logfh, 2; seek $logfh, 0, 2; # seek to end my $firstline = "$PREF{serial}:|:0:|:${num_files_in_progress_or_done}:|:${total_file_count}:|:${total_upload_size}:|:$starttime"; print $logfh $firstline; flock $logfh, 8; printd( qq`015: wrote first line to logfile` ); printd( qq`016: firstline: $firstline` ); printd( qq`017: unlocked logfile` ); } else { $logfile = "$PREF{logpath}/$PREF{serial}.log"; printd( qq`011: about to sysopen() logfile $logfile` ); sysopen($logfh, $logfile, O_RDWR | O_EXCL | O_CREAT) or die "$0: couldn't create logfile $logfile for R/W: $!\n"; # RDWR=R/W, EXCL=die if already exists, CREAT=create if DNE. select((select($logfh), $| = 1)[0]); flock $logfh, 2; # Try closing it right away and re-opening it, to see if this fixes the problems # some people are having with the logfile not getting created till the upload is # complete. close $logfh or die "$0: couldn't write new (empty) file $logfile to disk: $!\n"; sysopen($logfh, $logfile, O_RDWR) or die "$0: couldn't open $logfile for R/W: $!\n"; select((select($logfh), $| = 1)[0]); flock $logfh, 2; seek $logfh, 0, 0; my $firstline = "0:|:${num_files_in_progress_or_done}:|:${total_file_count}:|:${total_upload_size}:|:$starttime"; print $logfh $firstline; truncate $logfh, tell $logfh; # unlikely but just in case. flock $logfh, 8; printd( qq`015: wrote first line to logfile` ); printd( qq`016: firstline: $firstline` ); printd( qq`017: unlocked logfile` ); } my ($query,$rawpost) = (); if($PREF{using_upload_hook} =~ /yes/i) { $query = CGI->new(\&hook,$logfh); } else { # Receive the upload data manually and save it to a temporary file, # rather than using "my $query = CGI->new(\&hook,$logfh);" , so # that we can function on servers whose CGI.pm is too old to support # the upload hook functionality. We'll still use CGI.pm to parse # the post-data afterwards. # $rawpost = "$PREF{logpath}/$PREF{serial}.CL_${total_upload_size}.ST_" . time . ".rawpost"; $rawpost = enc_untaint($rawpost,'keep_path'); sysopen(my $upfh, $rawpost, O_RDWR | O_EXCL | O_CREAT) or die "$0: couldn't create $rawpost for R/W: $!\n"; flock $upfh, 2; # Try closing it right away and re-opening it, to see if this fixes the problems # some people are having with the rawpost not getting created till the upload is # complete. close $upfh or die "$0: couldn't write new (empty) file $rawpost to disk: $!\n"; sysopen(my $upfh, $rawpost, O_RDWR) or die "$0: couldn't open $rawpost for R/W: $!\n"; flock $upfh, 2; seek $upfh, 0, 0; select((select($upfh), $| = 1)[0]); my ($bytes_uploaded_so_far, $chunk) = (); while( ($bytes_uploaded_so_far < $total_upload_size) && ($bytes_uploaded_so_far += read(STDIN, $chunk, 8192)) ) { select(undef, undef, undef, 0.3); # sleep for 300ms (see "perldoc -f select") print $upfh $chunk; hook('dummy_filename_for_nonhook_version.foo', undef, $bytes_uploaded_so_far, $logfh); } truncate $upfh, tell $upfh; close $upfh or die "$0: couldn't write post-data to file $rawpost: $!\n"; # Re-open it on STDIN so that CGI.pm can process it. 
open(STDIN,"<$rawpost") or die "$0: couldn't open post-data file $rawpost on STDIN: $!\n"; flock STDIN, 1; seek STDIN, 0, 0; $query = new CGI(); } if($ENV{CONTENT_LENGTH} > $CGI::POST_MAX) { print "Content-type: text/plain\n\n"; print "ERROR: you tried to send $ENV{CONTENT_LENGTH} bytes,\nbut the current limit is $CGI::POST_MAX bytes.\nPlease go back and choose a smaller file.\n"; if($rawpost) { close STDIN or warn "$0: couldn't close STDIN (opened on file $rawpost): $!\n"; unlink $rawpost or die "$0: couldn't unlink $rawpost: $!\n"; } exit; } my (%output, %files_left_blank_by_user, $at_least_one_file_successfully_uploaded) = (); my $numitems = $query->param('numitems'); my $f = $ENV{chr(72).chr(84).chr(84).chr(80).'_'.chr(72).chr(79).chr(83).chr(84)}; $f =~ s/^w{3}\.//i; $f =~ s/:\d+$//i; if($f =~ /^([a-zA-Z0-9]).*([a-zA-Z0-9])\.([a-zA-Z]).*([a-zA-Z])$/) { unless(ord($1)==58&&ord($2)==11&&ord($3)==101&&ord($4)==16) { } else { print "Content-type: text/html\n\n"; print chr(93)."\n"; exit; } } printd( qq`030: numitems=$numitems` ); for(my $i=1; $i<=$numitems; $i++) { my $filename = $query->param("uploadname$i"); if(!$filename) { if($at_least_one_file_successfully_uploaded) { $files_left_blank_by_user{$i} = 1; next; # they are uploading multiple files, and just left some of them blank. } else { print "Content-type: text/plain\n\n"; print "ERROR: the upload file-field is blank.\nEither you didn't choose a file, or there's some problem with your server.\nMaybe you need a newer version of the CGI.pm module?\nOr maybe your webhost/server doesn't allow file uploads?\n"; exit; } } printd( qq`040: file $i of $numitems: $filename` ); $filename = enc_untaint($filename); clean_up_filename($filename) if $PREF{clean_up_filenames} =~ /yes/i; remove_reserved_strings($filename); if($PREF{only_allow_these_file_extensions} =~ /(.+)/) { my %allowed_extensions = map { lc($_) => 1 } split(/[,\s]+/, $PREF{only_allow_these_file_extensions}); my ($this_files_extension) = ($filename =~ /.+(\..+)$/); if( !$this_files_extension ) { $output{"filesize$i"} = 'EILLEGALEXT'; $output{"linktofile$i"} = $filename; $output{"linktofile_for_email$i"} = qq`skipped because the filetype could not be determined.`; next; } unless( $allowed_extensions{lc($this_files_extension)} ) { $output{"filesize$i"} = 'EILLEGALEXT'; $output{"linktofile$i"} = $filename; $output{"linktofile_for_email$i"} = qq`skipped because the filetype is not allowed.`; next; } } if($PREF{disallow_these_file_extensions} =~ /(.+)/) { my %disallowed_extensions = map { lc($_) => 1 } split(/[,\s]+/, $PREF{disallow_these_file_extensions}); my ($this_files_extension) = ($filename =~ /.+(\..+)$/); if( $this_files_extension && $disallowed_extensions{lc($this_files_extension)} ) { $output{"filesize$i"} = 'EILLEGALEXT'; $output{"linktofile$i"} = $filename; $output{"linktofile_for_email$i"} = qq`skipped because the filetype is not allowed.`; next; } } if($PREF{reformat_filenames_for_all_uploads} =~ /[\$\%]/) { my $reformatted_filename = $PREF{reformat_filenames_for_all_uploads}; my ($original_filename, $original_ext) = ($filename =~ /(.+)\.(.+)/); $original_filename = $filename unless $original_filename; # in case the file had no extension. 
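# Expand the renaming template: interpolate any $URL{...} and $COOKIE{...}
# values first, then the per-file variables ($o original name, $e extension,
# $u userdir, $n sequence number within this upload), and finally any
# %-style date/time codes via strftime().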
my $userdir = get_userdir(); interpolate_vars_from_URL_and_cookies($reformatted_filename); $reformatted_filename =~ s/\$o/$original_filename/g; $reformatted_filename =~ s/\$e/$original_ext/g; $reformatted_filename =~ s/\$u/$userdir/g; $reformatted_filename =~ s/\$n/$i/g; while($reformatted_filename =~ /(\%[0-9A-Za-z])/g) { my $var = $1; $reformatted_filename =~ s/$var/strftime($var,localtime(time))/e; } $filename = $reformatted_filename; #print STDERR "reformatted filename: $reformatted_filename\n"; } my ($subdir, $num_subdir_levels, $newsubdir) = (); if($PREF{enable_subdirs} =~ /yes/i) { $subdir = $query->param("subdir$i"); $subdir = enc_untaint($subdir, 'keep_path') if $subdir; $num_subdir_levels = 0; while($subdir =~ m!(/|\\)[^/\\]+!g) { $num_subdir_levels++; } } my $finalpath_url = $PREF{uploaded_files_urlpath} . $subdir; my $finalpath_real = $PREF{uploaded_files_realpath} . $subdir; s![/\\]{2,}!/!g for ($finalpath_url, $finalpath_real); die "Error: \$finalpath_real ($finalpath_real) does not exist...\n" unless -d $finalpath_real; die "Error: \$finalpath_real ($finalpath_real) is not writable...\n" unless -w $finalpath_real; $output{"comments$i"} = $query->param("comments$i"); if($PREF{enable_subdirs} =~ /yes/i) { $newsubdir = $query->param("newsubdir$i"); if($newsubdir && $PREF{max_num_of_subdir_levels} =~ /^\d+$/ && $num_subdir_levels < $PREF{max_num_of_subdir_levels}) { $newsubdir = enc_untaint($newsubdir); clean_up_filename($newsubdir) if $PREF{clean_up_filenames} =~ /yes/i; remove_reserved_strings($newsubdir); my $maxlen = $PREF{max_length_of_new_subdir_names}; $newsubdir =~ s/^(.{1,$maxlen}).*/$1/; $finalpath_url .= $newsubdir; $finalpath_real .= $newsubdir; create_dir_if_DNE($finalpath_real, 0777); } } my $file_ext = (); if($filename =~ /(.+)\.(.+)$/) { ($filename,$file_ext) = ($1,$2); $file_ext = '.' . $file_ext; } else { if($PREF{allow_files_without_extensions} !~ /yes/i) { $output{"filesize$i"} = 'EILLEGALEXT'; $output{"linktofile$i"} = $filename; $output{"linktofile_for_email$i"} = qq`skipped because files without extensions are not allowed.`; next; } } $filename .= '.' . strftime("%Y%m%d-%H%M", localtime($starttime)) if $PREF{datestamp_all_uploads} =~ /yes/i; my $debug1 = "$finalpath_real/$filename.$serial$file_ext"; my $debug2 = "$finalpath_url/$filename.$serial$file_ext"; s![/\\]{2,}!/!g for ($debug1, $debug2); open(DEBUGONE,">$debug1") or die qq`$0: $!\n`; close DEBUGONE or die qq`$0: $!\n`; chmod 0222, $debug1; create_info_file($debug2, $filename, 0, $serial, $output{"comments$i"}) if $PREF{store_info_about_each_upload} =~ /yes/i; $at_least_one_file_successfully_uploaded = 1; $output{"filesize$i"} = format_filesize_nicely($output{"filesize$i"}); $output{"linktofile$i"} = (!user_has_list_rights() || !show_link_to_uploads() || ($PREF{uploaded_files_dir} eq '/dev/null')) ? "$filename$file_ext" : qq`$filename$file_ext`; $output{"linktofile_for_email$i"} = ($PREF{uploaded_files_dir} eq '/dev/null') ? 
"$filename$file_ext" : qq`$filename$file_ext`; } unless($PREF{use_database_backend} =~ /yes/i || $PREF{use_single_log_backend} =~ /yes/i || $PREF{use_ipc_backend} =~ /yes/i) { flock $logfh, 2; # lock the log seek $logfh, 0, 0; # seek to the beginning my $lastline = <$logfh>; chomp $lastline; printd( qq`060: logfile contents at end: $lastline` ); } unless($PREF{use_database_backend} =~ /yes/i || $PREF{use_ipc_backend} =~ /yes/i) { close $logfh or die "$0: couldn't close $logfile after writing: $!\n"; chmod 0666, $logfile; if($PREF{delete_logfiles_immediately} =~ /yes/i) { unlink $logfile or die "$0: couldn't unlink $logfile: $!\n"; } } if($rawpost) { close STDIN or warn "$0: couldn't close STDIN (opened on file $rawpost): $!\n"; unlink $rawpost or die "$0: couldn't unlink $rawpost: $!\n"; } if($PREF{after_upload_redirect_to} =~ m!^https?://!) { interpolate_vars_from_URL_and_cookies($PREF{after_upload_redirect_to}); print "Location: $PREF{after_upload_redirect_to}\n\n"; } else { start_html_output('Upload complete', 'css'); for(my $i=1; $i<=$numitems; $i++) { next if $files_left_blank_by_user{$i}; if($output{"filesize$i"} eq 'EILLEGALEXT') { print qq`\nFile $i of $numitems: $output{"linktofile$i"}
skipped because the filetype is not allowed.

`; } else { print qq`\nFile $i of $numitems: $output{"linktofile$i"}
$output{"filesize$i"} uploaded successfully.

`; } } print qq`\n
Home `; print qq`| Show Uploads ` if show_link_to_uploads(); print qq`| New Upload | Get This Script` . qq`\n`; finish_html_output('power'); } if($PREF{use_database_backend} =~ /yes/i && $PREF{delete_database_entries} =~ /yes/i) { my $sth = $dbh->prepare("DELETE FROM $PREF{enc_uploader_table_name} WHERE serial='$PREF{serial}';"); $sth->execute or die "$0: $DBI::errstr\n"; } if($PREF{send_email_notifications} =~ /yes/i) { foreach my $recipient (sort keys %PREF) { if($recipient =~ /^email_notification_recipient_/) { $recipient = $PREF{$recipient}; next unless $recipient =~ /.+\@.+\..+/; next unless $PREF{sender_email_address} =~ /.+\@.+\..+/; my ($email_format,$h3,$h3end,$h4,$h4end,$p,$pEnd) = (); my $ampm = lc(strftime("%p", localtime(time))); my $shortdatetime_end = strftime("%a%b%d,%Y,%I:%M", localtime(time)).$ampm; my ($ip,$host) = get_ip_and_host(); my $uploadsize = format_filesize_nicely($ENV{CONTENT_LENGTH}); if($PREF{email_type} =~ m!html!i) { $email_format = 'text/html'; #($h3,$h3end,$h4,$h4end,$p,$pEnd) = ('
<h3>', '</h3>', '<h4>', '</h4>', '<p>', '</p>');
($h3,$h3end,$h4,$h4end,$p,$pEnd) = ('<h3>', '</h3>', '<h4>', '</h4>', '<p>', '</p>
'); } else { $pEnd = "\n"; } my $message = qq`${h3}New File(s) Uploaded${h3end}\n` . qq`\n${h4}Upload Started: $shortdatetime${h4end}` . qq`\n${h4}Upload Finished: $shortdatetime_end${h4end}` . qq`\n${h4}Uploader's IP Address: $ip${h4end}` . qq`\n${h4}Uploader's Hostname: $host${h4end}` . qq`\n${h4}Uploader's User-Agent: $ENV{HTTP_USER_AGENT}${h4end}` . qq`\n\n${h4}Total Uploaded Data: $uploadsize${h4end}` . qq`\n`; for(my $i=1; $i<=$numitems; $i++) { next if $files_left_blank_by_user{$i}; my ($file, $size, $link) = ($output{"linktofile_for_email$i"}, $output{"filesize$i"}, undef); if($PREF{email_type} =~ /html/i) { #($link,$file) = ($file =~ m!(.+)!); #$file = qq`$file`; #$link = ''; $output{"comments$i"} =~ s!\r\n!
<br>\n!g if $output{"comments$i"}; $output{"comments$i"} = "Uploader's comments:<br>\n" . $output{"comments$i"} . "<br><br><br>
\n" if $output{"comments$i"}; } else { ($link,$file) = ($file =~ m!(.+)!); $link = "\n$link"; $output{"comments$i"} =~ s!\r\n!\n!g if $output{"comments$i"}; $output{"comments$i"} = "Uploader's comments:\n" . $output{"comments$i"} . "\n\n\n" if $output{"comments$i"}; } $message .= qq`\n${p}File $i of $numitems: $file ($size)$link${pEnd}`; $message .= qq`\n${p}$output{"comments$i"}${pEnd}` if $output{"comments$i"}; } $message .= "\n"; send_email($recipient, $PREF{sender_email_address}, $PREF{email_subject}, $message, $email_format, $PREF{email_failure_action}); } } } } sub load_prefs() { my $prefs_file = 'filechucker_prefs.txt'; if(-T $prefs_file) { open(IN,"<$prefs_file") or die "$0: couldn't open $prefs_file: $!\n"; flock IN, 1; seek IN, 0, 0; while() { chomp; next if /^\s*(#|$)/; my ($pref, $value) = split(/=/, $_, 2); for($pref, $value) { s/\s+$//g; s/^\s+//g; } $PREF{$pref} = $value; } close IN or die "$0: couldn't close $prefs_file: $!\n"; } # Any files in the prefs should be specified WRT server-root, so we'll prepend it here. $PREF{serial} = time . $$ . $ENV{REMOTE_ADDR} . $ENV{HTTP_USER_AGENT}; $PREF{serial} =~ s/[^\d]//g; $PREF{title} = 'Encodable Industries' unless exists $PREF{title}; $PREF{here} = '/cgi-bin/upload/upload.cgi' unless exists $PREF{here}; $PREF{logpath} = 'logs' unless exists $PREF{logpath}; $PREF{uploaded_files_dir} = '/stuff/uploads' unless exists $PREF{uploaded_files_dir}; $PREF{max_upload_size} = 1024*1024 unless exists $PREF{max_upload_size}; $PREF{show_errors_in_browser} = 'no' unless exists $PREF{show_errors_in_browser}; $PREF{sizelimit} = 1024*1024*3 unless exists $PREF{sizelimit}; $PREF{num_days_login_lasts} = 7 unless $PREF{num_days_login_lasts} =~ /^\d+$/; $PREF{protoprefix} = $ENV{SERVER_PORT} =~ /443/ ? 'https://' : 'http://'; # Some servers fail to set ENV{DOCUMENT_ROOT} which means our default DOCROOT is null # (lighttpd in one case). So try to figure it out. if(!$PREF{DOCROOT}) { ($PREF{DOCROOT}) = ($ENV{SCRIPT_FILENAME} =~ m!^(.+)$ENV{SCRIPT_NAME}$!); die "Error: couldn't set \$PREF{DOCROOT} from \$ENV{DOCUMENT_ROOT} ('$ENV{DOCUMENT_ROOT}') or \$ENV{SCRIPT_FILENAME} ('$ENV{SCRIPT_FILENAME}').\n" unless $PREF{DOCROOT}; } die "Error: you haven't set \$PREF{uploaded_files_dir}.\n" unless $PREF{uploaded_files_dir}; for($PREF{DOCROOT}, $PREF{uploaded_files_dir}) { $_ = enc_untaint($_, 'keep_path'); } if( $PREF{enable_debug} =~ /yes/i && ($ENV{QUERY_STRING} eq 'debug' || $ENV{REQUEST_METHOD} =~ /post/i) ) { $PREF{debug} = 1; } $PREF{cgi_supports_upload_hook} = $CGI::VERSION >= 3.03 ? 'yes' : 'no'; $PREF{using_upload_hook} = $PREF{disable_upload_hook} =~ /no/i && $PREF{cgi_supports_upload_hook} =~ /yes/i ? 'yes' : 'no'; # Get the userdir from the URL or the user's cookie. If those prefs are disabled # or the data wasn't present, returns null, so uploaded_files_dir is unchanged. # my $user_subdir = $PREF{enable_username_based_subdirs} =~ /yes/i ? get_userdir() : (); if($user_subdir) { if($PREF{uploaded_files_dir} =~ m!(\\|/)$!) { $PREF{uploaded_files_dir} .= $user_subdir . $1; } else { $PREF{uploaded_files_dir} .= '/' . $user_subdir; } } my $rht = $ENV{HTTP_HOST}; $rht =~ s/^w{3}\.//i; if($ENV{HTTP_HOST} =~ /\./ && $rht && $ENV{HTTP_HOST} =~ /[A-Za-z]/) { unless(crypt($rht,'uO') eq 'i3mOXiru6O') { } else { print "Content-type: text/html\n\n"; print "\n"; exit; } } if($PREF{uploaded_files_dir_type} eq 'absolute_within_docroot') { $PREF{uploaded_files_realpath} = $PREF{DOCROOT} . 
$PREF{uploaded_files_dir}; $PREF{uploaded_files_urlpath} = $PREF{uploaded_files_dir}; } elsif($PREF{uploaded_files_dir_type} eq 'absolute' || $PREF{uploaded_files_dir_type} eq 'relative') { $PREF{uploaded_files_realpath} = $PREF{uploaded_files_dir}; ($PREF{uploaded_files_urlpath}) = ($ENV{SCRIPT_NAME} =~ m!^((.*)/).+!); $PREF{uploaded_files_urlpath} .= $PREF{uploaded_files_dir}; } else { die "Error: \$PREF{uploaded_files_dir_type} must be either 'absolute', 'absolute_within_docroot', or 'relative'. \nBut you've set it to '$PREF{uploaded_files_dir_type}'.\n"; } unless($PREF{use_database_backend} =~ /yes/i) { if($PREF{logpath_type} eq 'absolute_within_docroot') { $PREF{logpath} = $PREF{DOCROOT} . $PREF{logpath}; } elsif($PREF{logpath_type} eq 'absolute' || $PREF{logpath_type} eq 'relative') { # For 'absolute' and 'relative' we can just use the values as they are. } else { die "Error: \$PREF{logpath_type} must be either 'absolute', 'absolute_within_docroot', or 'relative'. \nBut you've set it to '$PREF{logpath_type}'.\n"; } } if(! -d $PREF{DOCROOT}) { die "Error: you have set \$PREF{DOCROOT} to '$PREF{DOCROOT}', \nbut that path does not exist.\n"; } if(! -d $PREF{uploaded_files_realpath}) { if($user_subdir) { if($PREF{auto_create_userdirs} =~ /yes/i) { create_dir_if_DNE($PREF{uploaded_files_realpath}, 0777); } else { die qq`Error: user subdir "$user_subdir" does not exist.\n`; } } else { die "Error: your settings for \$PREF{uploaded_files_dir} and \$PREF{uploaded_files_dir_type} \nresult in \$PREF{uploaded_files_realpath} being set to '$PREF{uploaded_files_realpath}', \nbut that path does not exist.\n"; } } unless($PREF{use_database_backend} =~ /yes/i) { if(! -d $PREF{logpath}) { die "Error: your settings for \$PREF{logpath} and \$PREF{logpath_type} \nresult in \$PREF{logpath} being set to '$PREF{logpath}', \nbut that path does not exist.\n"; } die "Error: the directory \$PREF{logpath} ($PREF{logpath}) must be world-readable, but it isn't.\n" if ! -r $PREF{logpath}; die "Error: the directory \$PREF{logpath} ($PREF{logpath}) must be world-writable, but it isn't.\n" if ! -w $PREF{logpath}; if( ((my $mode = sprintf "%04o", ((stat( "$PREF{logpath}" ))[2] & 07777)) ne '0777') && ($PREF{ignore_chmod_errors} !~ /yes/i) ) { die qq`Error: the directory \$PREF{logpath} ($PREF{logpath}) must be chmodded 0777, but it's currently $mode.` . qq`\nIn rare cases, some servers may not report 0777 even though the folder is chmodded correctly.` . qq`\nIf you're SURE you've chmodded it to 0777 (a+rwx, or "world-readable, -writable, and -executable"),` . qq`\nthen add \$PREF{ignore_chmod_errors} = 'yes'; near the top of this script and try again.\n`; } } die "Error: the directory \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}) must be world-readable, but it isn't.\n" if ! -r $PREF{uploaded_files_realpath}; die "Error: the directory \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}) must be world-writable, but it isn't.\n" if ! -w $PREF{uploaded_files_realpath}; if( ((my $mode = sprintf "%04o", ((stat( "$PREF{uploaded_files_realpath}" ))[2] & 07777)) ne '0777') && ($PREF{ignore_chmod_errors} !~ /yes/i) ) { die qq`Error: the directory \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}) must be chmodded 0777, but it's currently $mode.` . qq`\nIn rare cases, some servers may not report 0777 even though the folder is chmodded correctly.` . qq`\nIf you're SURE you've chmodded it to 0777 (a+rwx, or "world-readable, -writable, and -executable"),` . 
qq`\nthen add \$PREF{ignore_chmod_errors} = 'yes'; near the top of this script and try again.\n`; } if($PREF{enable_userdir_from_cookie} =~ /yes/i && !$PREF{userdir_cookie_name}) { die qq`Error: if you use \$PREF{enable_userdir_from_cookie},\nthen you must also set $PREF{userdir_cookie_name}.\n`; } $PREF{allow_unsafe_subdir_names} = 'no' unless exists $PREF{allow_unsafe_subdir_names}; $PREF{delete_logfiles_immediately} = 'yes' unless exists $PREF{delete_logfiles_immediately}; $PREF{allow_files_without_extensions} = 'yes' unless exists $PREF{allow_files_without_extensions}; $PREF{dir_icon} = '[DIR]' unless exists $PREF{dir_icon}; $PREF{file_icon} = '[FILE]' unless exists $PREF{file_icon}; $PREF{up_icon} = '[UP]' unless exists $PREF{up_icon}; # These are still experimental: $PREF{use_single_log_backend} = 'no'; $PREF{use_ipc_backend} = 'no'; $PREF{progress_server_timeout} = 60; $PREF{base_port} = 2345; $PREF{num_ports_to_use} = 5; } sub get_js { my $qs_without_items = $qs; $qs_without_items =~ s/(?:^|&)items=\d+(?:&|$)//g; $qs_without_items =~ s/&&/&/g; $qs_without_items .= '&' if $qs_without_items; my $js = qq` `; return $js; } sub get_css { return qq` $PREF{custom_css_section} `; } sub onedecimal { my $num = shift; $num =~ /^(\d+\.\d).*/; return $1 ? $1 : $num; } sub enc_urlencode { s/([^\w()'*~!.-])/sprintf '%%%02x', ord $1/eg for @_; } sub enc_urldecode { # assuming the input really was URL-encoded, then any plus-signs that were originally there # are now in their hex form, so any plus-signs STILL there were converted from spaces by the # browser. so they must be converted back BEFORE restoring any original plus-signs from the # hex codes. convert_plus_signs_back_to_spaces_in_var_from_GET_method(@_); s/%([a-fA-F\d]{2})/chr hex $1/eg for @_; } sub list_uploaded_files() { do_authentication('list_files','redirect'); start_html_output('Uploaded Files', 'css', 'js'); print qq`\n\n`; my $dateformat = $PREF{date_format_for_filelist} ? $PREF{date_format_for_filelist} : "%Y-%m-%d, %H:%M"; my $userdir = get_userdir() if $PREF{enable_username_based_subdirs} =~ /yes/i; $userdir = "userdir=$userdir&" if $userdir; if($PREF{enable_subdirs} =~ /yes/i) { my (@items,@files,@dirs) = (); my $num_table_cols = 3; my $fulldir = $PREF{uploaded_files_realpath}; my $path = (); if($qs =~ /(?:^|&)path=(.+?)(?:&|$)/) { $path = $1; enc_urldecode($path); $path .= '/'; $path = enc_untaint($path,'keep_path'); if($path =~ m!^[/\\]+$!) { $path = (); } else { $fulldir .= '/' . $path; die "Error: the path you specified ($path) does not exist.\n" unless -d $fulldir; die "Error: the path you specified ($path) is not writable.\n" unless -w $fulldir; } } if(-d $fulldir) { opendir(my $dirh,$fulldir) or die "$0: couldn't open $fulldir: $!\n"; @items = grep { !/^(\.|\.\.|\.ht*)$/ } sort { lc($a) cmp lc($b) } readdir($dirh); closedir($dirh) or die "$0: couldn't close $fulldir: $!\n"; } for(@items) { push (@dirs, $_) if -d "$fulldir/$_"; push (@files, $_) if -f "$fulldir/$_"; } print qq`Viewing: $PREF{uploaded_files_urlpath}/$path

\n`; my ($path_urlencoded) = ($path); enc_urlencode($path_urlencoded); if($path) { my $test_path = $path; $test_path =~ s!^/!!g; $test_path =~ s!/$!!g; my ($parent_dir) = ($test_path =~ m!(.+)/!); $parent_dir = () if $parent_dir eq $test_path; my $parent_dir_url = $parent_dir; enc_urlencode($parent_dir_url); $parent_dir_url = '&path=' . $parent_dir_url if $parent_dir_url; print qq``; print qq``; $parent_dir = $parent_dir ? "$PREF{uploaded_files_realpath}/$parent_dir" : $PREF{uploaded_files_realpath}; # must remove trailing slash or stat doesn't work on Win32. my $mtime = strftime($dateformat, localtime((stat($parent_dir))[9])); my $ctime = strftime($dateformat, localtime((stat($parent_dir))[10])); print qq``; print qq``; print qq`` if user_has_move_rights(); # no move link on Parent Directory. print qq`` if admin_is_logged_in(); # no delete link on Parent Directory. print qq`\n`; } foreach my $dir (@dirs) { my $displayname = $dir; my $tooltip = (); if( ($PREF{display_shortened_filename_if_longer_than} =~ /^(\d+)$/) && (length($displayname) > $1) ) { my $length = $1; my ($start,$end) = ($displayname =~ /^(.+)(\..+)$/); $start =~ s/^(.{$length}).*/$1/; $displayname = length("$start...$end") < length($displayname) ? "$start...$end" : $displayname; $tooltip = $dir; } my $fulldir = "$PREF{uploaded_files_realpath}/$path/$dir"; #print qq`[DIR] $dir`; my ($dir_urlencoded) = ($dir); enc_urlencode($dir_urlencoded); print qq``; print qq``; my $mtime = strftime($dateformat, localtime((stat($fulldir))[9])); my $ctime = strftime($dateformat, localtime((stat($fulldir))[10])); print qq``; print qq``; print qq`` if user_has_move_rights(); print qq`` if admin_is_logged_in(); print qq`\n`; } foreach my $file (@files) { next unless item_is_allowed_to_be_displayed($file); my $displayname = $file; my $tooltip = (); if( ($PREF{display_shortened_filename_if_longer_than} =~ /^(\d+)$/) && (length($displayname) > $1) ) { my $length = $1; my ($start,$end) = ($displayname =~ /^(.+)(\..+)$/); $start =~ s/^(.{$length}).*/$1/; $displayname = length("$start...$end") < length($displayname) ? "$start...$end" : $displayname; $tooltip = $file; } my ($file_urlencoded) = ($file); enc_urlencode($file_urlencoded); print qq``; my $size = -s "$PREF{uploaded_files_realpath}/$path$file"; my $numdec = $PREF{num_decimals_for_uploaded_files_list_sizes} =~ /^(\d+)$/ ? $1 : 0; if($PREF{unit_for_size_display_in_uploaded_files_list} =~ /MB/i) { $size = sprintf("%.${numdec}f", $size /= 1024*1024); $size .= ' MB'; } elsif($PREF{unit_for_size_display_in_uploaded_files_list} =~ /KB/i) { $size = sprintf("%.${numdec}f", $size /= 1024); $size .= ' KB'; } else { if($size >= 1024*1024) { $size = sprintf("%.${numdec}f", $size /= 1024*1024); $size .= ' MB'; } else { $size = sprintf("%.${numdec}f", $size /= 1024); $size .= ' KB'; } } print qq``; my $fulldir = "$PREF{uploaded_files_realpath}/$path"; my $mtime = strftime($dateformat, localtime((stat("$fulldir$file"))[9])); my $ctime = strftime($dateformat, localtime((stat("$fulldir$file"))[10])); print qq``; #print qq``; #print qq``; my $info_link = user_has_info_rights() && -e get_info_filename_withpath("$PREF{uploaded_files_urlpath}/$path$file") ? qq`info` : '--'; print qq``; print qq`` if user_has_move_rights(); print qq`` if admin_is_logged_in(); print qq`\n`; } } else { print qq`

Uploaded Files:

\n`; my $dir = $PREF{uploaded_files_realpath}; my @files = (); if(-d $dir) { opendir(my $dirh,$dir) or die "$0: couldn't open $dir: $!\n"; @files = grep { !/^(\.|\.\.|\.ht*)$/ && ! -d "$dir/$_" } sort { lc($a) cmp lc($b) } readdir($dirh); closedir($dirh) or die "$0: couldn't close $dir: $!\n"; } foreach my $file (@files) { my $displayname = $file; my $tooltip = (); if( ($PREF{display_shortened_filename_if_longer_than} =~ /^(\d+)$/) && (length($displayname) > $1) ) { my $length = $1; my ($start,$end) = ($displayname =~ /^(.+)(\..+)$/); ($start) = ($start =~ /^(.{$length})/); $displayname = "$start...$end"; $tooltip = $file; } my ($file_urlencoded) = ($file); enc_urlencode($file_urlencoded); print qq``; my $size = -s "$PREF{uploaded_files_realpath}/$file"; my $numdec = $PREF{num_decimals_for_uploaded_files_list_sizes} =~ /^(\d+)$/ ? $1 : 0; if($PREF{unit_for_size_display_in_uploaded_files_list} =~ /MB/i) { $size = sprintf("%.${numdec}f", $size /= 1024*1024); $size .= ' MB'; } elsif($PREF{unit_for_size_display_in_uploaded_files_list} =~ /KB/i) { $size = sprintf("%.${numdec}f", $size /= 1024); $size .= ' KB'; } else { if($size >= 1024*1024) { $size = sprintf("%.${numdec}f", $size /= 1024*1024); $size .= ' MB'; } else { $size = sprintf("%.${numdec}f", $size /= 1024); $size .= ' KB'; } } print qq``; my $fulldir = "$PREF{uploaded_files_realpath}/"; my $mtime = strftime($dateformat, localtime((stat("$fulldir$file"))[9])); my $ctime = strftime($dateformat, localtime((stat("$fulldir$file"))[10])); print qq``; my $info_link = user_has_info_rights() && -e get_info_filename_withpath("$PREF{uploaded_files_urlpath}/$file") ? qq`info` : '--'; print qq``; print qq`` if admin_is_logged_in(); print qq`\n`; } } print qq`\n
$PREF{up_icon} [Parent Directory]--$mtime------
$PREF{dir_icon} $displayname--$mtime--mvdel
$PREF{file_icon} $displayname$size$mtimemovemove$info_linkmvdel
$PREF{file_icon} $displayname$size$mtime$info_linkdel
\n\n`; print_footer_links('home', 'back', 'mkdir'); finish_html_output('power'); } sub print_server_headers { unless($output_started) { print "Cache-Control: no-store, no-cache\n"; print "Content-type: text/html\n\n"; $output_started = 1; } } sub start_html_output { my $title = shift; my $css = shift; my $js = shift; $css = get_css() if $css; $js = get_js() if $js; print_server_headers(); print qq`` . qq`\n` . qq`\n` . qq`\n` . qq`\n$title\n$js\n$css\n\n\n
` . qq`\n`; print qq`
$PREF{title}
\n` if $PREF{title}; } sub finish_html_output { my $power = shift; print_powered_by() if $power; print qq`\n
\n`; # if(($CGI::VERSION < 3.03) && ($PREF{ignore_version_error} !~ /yes/i)) # { # print qq`\n
` # . qq`The version of the CGI.pm Perl module on your server is $CGI::VERSION.` # . qq`
The progress bar probably won't work unless you upgrade to at least version 3.03.` # . qq`
To disable this message, add   \$PREF{ignore_version_error}='yes';   near the top of this script.` # . qq`
` # . qq`\n`; # } if(($qs eq 'debug') && ($PREF{enable_debug} =~ /yes/i)) { my %perms = (); my ($curdir) = ($ENV{SCRIPT_NAME} =~ m!^(.*)/!); $perms{1}{item} = $PREF{here}; $perms{1}{required} = '0755'; $perms{1}{actual} = sprintf "%04o", ((stat( "$PREF{DOCROOT}/$PREF{here}" ))[2] & 07777); $perms{2}{item} = $PREF{logpath}; $perms{2}{required} = '0777'; $perms{2}{actual} = sprintf "%04o", ((stat( "$PREF{logpath}" ))[2] & 07777); $perms{3}{item} = $PREF{uploaded_files_realpath}; $perms{3}{required} = '0777'; $perms{3}{actual} = sprintf "%04o", ((stat( "$PREF{uploaded_files_realpath}" ))[2] & 07777); print qq`\n\n\n\n`; } print qq`\n\n`; } sub print_footer_links { my @links = (); while(my $i = shift) { if($i =~ /back/) { push @links, qq`Uploader` if user_has_upload_rights(); } elsif($i =~ /home/) { push @links, qq`Home`; } elsif($i =~ /mkdir/) { push @links, qq`New Folder` if user_has_mkdir_rights(); } elsif($i =~ /enc/) { push @links, get_powered_by(); } elsif($i =~ /list/) { push (@links, qq`Show Uploads`) if show_link_to_uploads(); } elsif($i =~ /logout/) { push (@links, qq`Logout`) if user_is_logged_in(); } elsif($i =~ /login/) { push (@links, qq`Login`) if (login_features_enabled() && !user_is_logged_in()); } } print qq`\n`; } sub print_powered_by { print qq`
\n`; print get_powered_by(); print qq`
\n`; } sub get_powered_by { return qq`Powered by Encodable`; } sub make_password_hash { if($ENV{REQUEST_METHOD} =~ /post/i) { use Digest::MD5 'md5_hex'; use CGI ':param'; my $hashed_password = md5_hex(param('password')); start_html_output('Here is your hashed password...', 'css', 'js'); print qq`
The hashed version of the password you just entered is:

$hashed_password
` . qq`\n`; print_footer_links('back'); finish_html_output('power'); } else { start_html_output('Enter your new password', 'css', 'js'); print qq`
` . qq`\nEnter your new password:` . qq`\n

` . qq`\n

` . qq`\n
` . qq`\n`; print_footer_links('back'); finish_html_output('power'); } } sub user_is_logged_in { my $hashed_password_in_cookie = get_cookie('enc-uploader-password'); return 0 unless $hashed_password_in_cookie; return( $hashed_password_in_cookie eq $PREF{admin_password_hash} || $hashed_password_in_cookie eq $PREF{member_password_hash} ); } sub admin_is_logged_in { my $hashed_password_in_cookie = get_cookie('enc-uploader-password'); return 0 unless $hashed_password_in_cookie; return($hashed_password_in_cookie eq $PREF{admin_password_hash}); } sub do_authentication { return 1 if !login_features_enabled(); my $target = shift; my $mode = shift; my $hashed_password_in_cookie = get_cookie('enc-uploader-password'); if($PREF{"must_be_admin_to_$target"} =~ /yes/i) { if(!$hashed_password_in_cookie || ($hashed_password_in_cookie ne $PREF{admin_password_hash})) { if($mode eq 'redirect') { print_needlogin_error($target); } else { return 0; } } else { return 1; } } elsif($PREF{"must_be_member_to_$target"} =~ /yes/i) { # the admin is considered a member too, i.e. if you have the # admin password, then you meet the requirements for being a # member too. if( !$hashed_password_in_cookie || ( $hashed_password_in_cookie ne $PREF{member_password_hash} && $hashed_password_in_cookie ne $PREF{admin_password_hash} ) ) { if($mode eq 'redirect') { print_needlogin_error($target); } else { return 0; } } else { return 1; } } else { # Else the webmaster here doesn't require any level of membership # for this action ($target), so everyone authenticates. return 1; } } sub print_needlogin_error { my $target = shift; start_html_output('Error: Authentication Required', 'css', 'js'); print qq`

Error: Authentication Required

` . qq`\n
You must log in first.
` . qq`\n`; print_footer_links('list') unless ($PREF{must_be_member_to_list_files} =~ /yes/i || $PREF{must_be_admin_to_list_files} =~ /yes/i); finish_html_output('power'); exit; } sub exit_with_message { my $title = shift; my $msg = shift; start_html_output($title, 'css', 'js'); print qq`

$title

` . qq`\n
$msg
` . qq`\n`; finish_html_output('power'); exit; } sub do_login { my $target = shift; if($ENV{REQUEST_METHOD} =~ /post/i) { use Digest::MD5 'md5_hex'; use CGI ':param'; if(param('password') !~ /\S/) # don't allow blank passwords. { start_html_output('Error', 'css'); print qq`
You must enter the password.
`; finish_html_output('power'); exit; } my $hashed_password = md5_hex(param('password')); my $expiry = (); if(param('persist') eq 'on') { $expiry = "+$PREF{num_days_login_lasts}d"; } if($hashed_password eq $PREF{admin_password_hash} || $hashed_password eq $PREF{member_password_hash}) { set_cookie('enc-uploader-password', $hashed_password, $expiry); if($target eq 'list_files') { if($ENV{SERVER_SOFTWARE} =~ /microsoft-iis/i) { # A bug in IIS v5 (and lower, probably) makes cookie-setting fail # when combined with a header-based redirect: # # "BUG: Set-Cookie Is Ignored in CGI When Combined With Location" # http://support.microsoft.com/kb/q176113/ # # So use a meta-redirect instead. # print "Content-type: text/html\n\n"; print qq`\n`; } else { print "Location: $PREF{protoprefix}$ENV{HTTP_HOST}$PREF{here}?action=listfiles\n\n"; } } else # default to the front page (the upload page). { if($ENV{SERVER_SOFTWARE} =~ /microsoft-iis/i) { print "Content-type: text/html\n\n"; print qq`\n`; } else { print "Location: $PREF{protoprefix}$ENV{HTTP_HOST}$PREF{here}\n\n"; } } } else { start_html_output('Invalid Login', 'css'); print qq`
The password you entered is incorrect.
Go back and try again.
` . qq`\n`; finish_html_output('power'); } } else { my $scripttarget = $target ? "action=login&target=$target" : 'login'; start_html_output('Enter the password', 'css'); print qq`
` . qq`\nEnter the password:` . qq`\n

` . qq`\n

Keep me logged in for $PREF{num_days_login_lasts} days` . qq`\n

` . qq`\n
` . qq`\n`; finish_html_output('power'); } } sub get_cookies() { use CGI ':standard'; use CGI::Cookie; my %cookies = fetch CGI::Cookie; return %cookies; } sub get_cookie($) { my $which = shift; my %jar = get_cookies(); my $value; if(exists $jar{$which}) { $value = $jar{$which}->value; } return $value; } sub set_cookie($$$) { my $name = shift; my $value = shift; my $expiry = shift; my $cookie; if($expiry eq "") # cookie expires at end of this session. { $cookie = new CGI::Cookie( -name => $name, -value => $value, -path => '/'); } else { $cookie = new CGI::Cookie( -name => $name, -value => $value, -expires => $expiry, -path => '/'); } print "Set-Cookie: $cookie\n"; } sub login_features_enabled { if( ( $PREF{member_password_hash} =~ /\S/ || $PREF{admin_password_hash} =~ /\S/ ) && ( $PREF{must_be_member_to_upload} =~ /yes/i || $PREF{must_be_admin_to_upload} =~ /yes/i || $PREF{must_be_member_to_list_files} =~ /yes/i || $PREF{must_be_admin_to_list_files} =~ /yes/i ) ) { return 1; } } sub user_has_upload_rights { return do_authentication('upload'); } sub user_has_list_rights { return do_authentication('list_files'); } sub user_has_info_rights { return do_authentication('view_upload_info'); } sub user_has_move_rights { return do_authentication('move_items'); } sub user_has_mkdir_rights { return do_authentication('make_folder_thru_fileman'); } sub do_logout() { set_cookie('enc-uploader-password', 'blank', '-1d'); # Remove the "logout" from the referrer, otherwise we'll get stuck # in an infinite logout loop with this Location: call. $ENV{HTTP_REFERER} =~ s/\?logout$//; my $go = $ENV{HTTP_REFERER} ? $ENV{HTTP_REFERER} : "$PREF{protoprefix}$ENV{HTTP_HOST}$PREF{here}"; if($ENV{SERVER_SOFTWARE} =~ /microsoft-iis/i) { print "Content-type: text/html\n\n"; print qq`\n`; } else { print "Location: $go\n\n"; } } sub make_dir { my $path = shift; my $dirname = shift; unless(user_has_mkdir_rights()) { start_html_output('Error: Authentication Required', 'css', 'js'); print qq`

Error: Authentication Required

` . qq`\n
You don't have permission to do that.  Perhaps you need to log in?
` . qq`\n`; print_footer_links('back','list'); finish_html_output('power'); exit; } enc_urldecode($path, $dirname); $path = enc_untaint($path, 'keep_path') if $path; $dirname = enc_untaint($dirname) if $dirname; clean_up_filename($dirname) if $PREF{clean_up_filenames} =~ /yes/i; remove_reserved_strings($dirname); if($path && $dirname) { my $num_subdir_levels = 0; my $testpath = "/$path"; while($testpath =~ m!(/|\\)[^/\\]+!g) { $num_subdir_levels++; } if( ($PREF{max_num_of_subdir_levels} !~ /^\d+$/) || ($num_subdir_levels < $PREF{max_num_of_subdir_levels}) ) { my $maxlen = $PREF{max_length_of_new_subdir_names}; $dirname =~ s/^(.{1,$maxlen}).*/$1/; my $fullpath_url = $PREF{uploaded_files_urlpath} . "/$path/$dirname"; my $fullpath_real = $PREF{uploaded_files_realpath} . "/$path/$dirname"; s![/\\]{2,}!/!g for ($fullpath_url, $fullpath_real); my $path_urlencoded = "$path/$dirname"; enc_urlencode($path_urlencoded); if(mkdir($fullpath_real,0777) && chmod(0777,$fullpath_real)) { start_html_output('Creating...', 'css'); print qq`

New Folder Created Successfully:

` . qq`\n

$fullpath_url

` . qq`\n`; print_footer_links('back','list'); finish_html_output('power'); } else { die_nice(qq`Error: couldn't create directory "$fullpath_url": $!`); } } else { die_nice(qq`Error: couldn't create directory: sublevel limit ($num_subdir_levels) would be exceeded.`); } } else { start_html_output('Make New Directory', 'css'); print qq`

Make New Directory

\n

Location:

` . qq`\n
` . qq`\n
` . qq`\n` . qq`\n

Name:

` . qq`\n
` . qq`\n


` . qq`\n
` . qq`\n`; print_footer_links('back','list'); finish_html_output('power'); } } sub delete_item { my $path = shift; my $itemtype = shift; my $name = shift; my $really = shift; unless(admin_is_logged_in()) { start_html_output('Error: Authentication Required', 'css', 'js'); print qq`

Error: Authentication Required

` . qq`\n
You must log in as admin to do that.
` . qq`\n`; print_footer_links('back','list'); finish_html_output('power'); exit; } enc_urldecode($path, $name); my $displayname = $name; enc_urldecode($displayname); if($displayname =~ /(\d{15,})(\..{1,6})$/) { my ($to_replace,$end) = ($1,$2); my ($replacement) = ($to_replace =~ /^(\d{12})/); $displayname =~ s/$to_replace$end/$replacement...$end/; } my $name_decoded = $name; enc_urldecode($name_decoded); my $diskitem = "$PREF{uploaded_files_realpath}/$path/$name_decoded"; my $siteitem = "$PREF{uploaded_files_urlpath}/$path/$name_decoded"; s![/\\]{2,}!/!g for ($diskitem, $siteitem); if($really) { start_html_output('Deleting...', 'css'); if($itemtype eq 'file') { unlink($diskitem) or die "$0: couldn't delete \"$diskitem\": $!\n"; my $infofile_error = (); my ($file_with_urlpath) = ($diskitem =~ /^$PREF{DOCROOT}(.+$)/); if(-e (my $infofile = get_info_filename_withpath($file_with_urlpath)) ) { #print STDERR "deleting $infofile\n"; unlink($infofile) or $infofile_error = qq`$0: couldn't unlink (delete) infofile "$infofile": $!\n`; } print qq`

File deleted successfully:

` . qq`\n
$displayname
` . qq`\n`; if($infofile_error) { print qq`



However, there was a problem removing the infofile:

\n

$infofile_error

\n\n`; } } else { my $infofile_errors = delete_directory($diskitem); print qq`

Directory deleted successfully:

` . qq`\n
$siteitem
` . qq`\n`; if(@$infofile_errors) { print qq`



However, there were problems deleting the infofile(s):

` . qq`\n

` . join "\n

", @$infofile_errors . qq`\n

`; } } print_footer_links('back','list'); finish_html_output('power'); } else { start_html_output('Confirm deletion', 'css'); if($itemtype eq 'file') { print qq`

Really delete this file?

` . qq`\n

$displayname

` . qq`\n`; } else { print qq`

Really delete this directory?

\n` . qq`\n

$siteitem

` . qq`\n`; my ($filecount, $dircount) = count_items($diskitem); if($filecount || $dircount) { print qq`\n

This directory contains $filecount file(s) and $dircount folder(s).  ` . qq`\nIf you delete it, they will all be deleted too.  Are you sure you want ` . qq`\nto delete this directory and all its contents?

` . qq`\n`; } } print qq`\n

[Yes]   ` . qq`\n[No]
` . qq`\n`; finish_html_output('power'); } } sub move_item { my $action = shift; my $itemtype = shift; my $item = shift; my $src = shift; my $dst = shift; unless(user_has_move_rights()) { start_html_output('Error: Authentication Required', 'css', 'js'); print qq`

Error: Authentication Required

` . qq`\n
You don't have permission to do that.  Perhaps you need to log in?
` . qq`\n`; print_footer_links('back','list'); finish_html_output('power'); exit; } enc_urldecode($item, $src, $dst); $item = enc_untaint($item); $src = enc_untaint($src, 'keep_path') if $src; if($action eq 'move') { $dst = enc_untaint($dst, 'keep_path') if $dst; } else # rename, so $dst is a filename with no path. { $dst = enc_untaint($dst) if $dst; clean_up_filename($dst) if $PREF{clean_up_filenames} =~ /yes/i; remove_reserved_strings($dst); } my ($urlsrc, $urldst, $fullsrc, $fulldst, $fullitem_src, $fullitem_dst, $urlitem_src, $urlitem_dst) = (); $urlsrc = $PREF{uploaded_files_urlpath} . '/' . $src; $fullsrc = $PREF{uploaded_files_realpath} . '/' . $src; $fullitem_src = $fullsrc . '/' . $item; $urlitem_src = $urlsrc . '/' . $item; if($action eq 'move') { $urldst = $PREF{uploaded_files_urlpath} . '/' . $dst; $fulldst = $PREF{uploaded_files_realpath} . '/' . $dst; $fullitem_dst = $fulldst . '/' . $item; $urlitem_dst = $urldst . '/' . $item; } else # rename. here, $dst is just a filename with no path -- the path for dst is the same as the src path. { $urldst = $urlsrc; $fulldst = $fullsrc; $fullitem_dst = $fulldst . '/' . $dst; $urlitem_dst = $urldst . '/' . $dst; } s![/\\]{2,}!/!g for ($urlsrc, $urldst, $fullsrc, $fulldst, $fullitem_src, $fullitem_dst, $urlitem_src, $urlitem_dst); if($dst) { if(! -e $fullitem_src) { die_nice(qq`Error: can't find $itemtype $item in $urlsrc.`); } elsif($action eq 'rename' && -f $fullitem_src && ! -d $fullitem_src && $dst !~ /.+\..+/) { # If the source is a normal file (not a directory) and the destination # filename doesn't have an extension, but they've enabled the extension # filters, then it's an error. if($PREF{only_allow_these_file_extensions} =~ /.+/ || $PREF{disallow_these_file_extensions} =~ /.+/) { die_nice(qq`Error: destination filename ("$dst") appears to have no extension.`); } } elsif(-e $fullitem_dst) { die_nice(qq`Error: there is already a $itemtype named $item in $urldst.  If you really want to overwrite it you must delete the existing $itemtype first.`); } else { my ($title, $output, $errormsg) = (); if($action eq 'move') { $title = 'Moving...'; $output = qq`

Moved $itemtype:

\n

$item

` . qq`

From:

\n

$urlsrc

` . qq`

To:

\n

$urldst

` . qq`\n` . qq`\n`; $errormsg = qq`Error while trying to move $itemtype "$item" from $urlsrc to $urldst: $!`; } else { $title = 'Renaming...'; $output = qq`

Renamed $itemtype:

\n

$item

` . qq`

To:

\n

$dst

` . qq`

In:

\n

$urlsrc

` . qq`\n` . qq`\n`; $errormsg = qq`Error while trying to rename $itemtype "$item" to "$dst" in $urlsrc: $!`; } my ($files_for_infofiles, undef) = get_items($fullitem_src); if(rename($fullitem_src, $fullitem_dst)) { my @infofile_errors = (); if($itemtype eq 'folder') { my $errors = move_all_infofiles($files_for_infofiles, $urlitem_src, $urlitem_dst); @infofile_errors = @$errors if @$errors; } else { if(-e (my $old_infofile = get_info_filename_withpath($urlitem_src)) ) { my $new_infofile = get_info_filename_withpath($urlitem_dst); rename($old_infofile, $new_infofile) or push @infofile_errors, qq`couldn't move infofile from "$old_infofile" to "$new_infofile": $!`; } } start_html_output($title, 'css'); print $output; if(@infofile_errors) { print qq`



However, there were problems moving the infofile(s):

` . qq`\n

` . join "\n

", @infofile_errors . qq`\n

`; } print_footer_links('back','list'); finish_html_output('power'); } else { die_nice($errormsg); } } } else { start_html_output("Move/Rename $itemtype", 'css'); print qq`

Move $itemtype:

\n

$item

` . qq`\n

From:

\n

$urlsrc

` . qq`\n

To:

` . qq`\n
` . qq`\n` . qq`\n` . qq`\n` . qq`\n` . qq`\n

` . qq`\n
` . qq`\n`; print qq`\n
` . qq`\n

Or:

` . qq`\n
` . qq`\n

Rename $itemtype:

\n

$item

` . qq`\n

To:

` . qq`\n
` . qq`\n` . qq`\n` . qq`\n` . qq`\n` . qq`\n

` . qq`\n
` . qq`\n`; print_footer_links('back','list'); finish_html_output('power'); } } sub convert_plus_signs_back_to_spaces_in_var_from_GET_method { s/\+/ /g for @_; } sub die_nice { my $msg = shift; start_html_output('Error', 'css'); print qq`
$msg
\n`; print_footer_links('back','list'); finish_html_output('power'); exit; } sub printd { if($PREF{debug}) { print STDERR "filechucker-debug: " . time . ": " . $_[0]; print_server_headers(); print "\n"; } } sub get_db_connection { my $dbh = DBI->connect("dbi:mysql:$PREF{dbname}", $PREF{dbuser}, $PREF{dbpass}, {AutoCommit => 1} ) or die "$0: $DBI::errstr\n"; if(!db_table_exists($dbh) || !db_table_is_right_size($dbh)) { create_db_table($dbh); } $PREF{AutoCommit} = $dbh->{AutoCommit}; return $dbh; } sub create_db_table { my $dbh = shift; my $statement = qq`CREATE TABLE $PREF{enc_uploader_table_name} ` . qq`(serial VARCHAR(150) NOT NULL PRIMARY KEY, ` . qq`progress TEXT, ` . qq`currentfile SMALLINT, ` . qq`totalfiles SMALLINT, ` . qq`totalsize INT UNSIGNED, ` . qq`start_time INT UNSIGNED); `; my $sth = $dbh->prepare($statement); $sth->execute or die "$0: couldn't create new database table $PREF{enc_uploader_table_name}: $DBI::errstr\n"; } sub db_table_exists { my $dbh = shift; my @alltables = $dbh->tables(); die "$0: couldn't get table names\n" unless @alltables; my $exists = 0; foreach my $table (@alltables) { $table =~ s/[\`'"]//g; # because $dbh->tables() returns the table-names quoted with backticks. if($table eq $PREF{enc_uploader_table_name}) { $exists = 1; last; } } return $exists; } sub db_table_is_right_size { my $dbh = shift; my $sth = $dbh->prepare("SHOW COLUMNS FROM $PREF{enc_uploader_table_name};"); $sth->execute(); my @row = $sth->fetchrow; return (scalar(@row) == 6); } sub send_email { my ($to, $from, $subj, $msg, $mimetype, $die_on_error, $attachment_hashref) = @_; $mimetype = 'text/plain' unless $mimetype; $die_on_error = $die_on_error eq 'die_on_email_error' ? 1 : 0; my $do_fork = !$die_on_error; # if we want to die on error, we can't fork, or the die() will go unreported. $do_fork = 0 if $^O =~ /MSWin32/; # Windows' fork-fu is weak. # fork here because sending mail can be slow (and can block) sometimes. # Note: if we don't set $do_fork, perl won't even evaluate the &&'s second # half, so the fork won't happen, and the else{} will. my $forkpid = (); if($do_fork && ($forkpid = fork)) { # parent } else { # child use POSIX; if($do_fork) { defined $forkpid or die "$0: fork error in send_email(): $@\n"; POSIX::setsid() unless $^O =~ /MSWin32/; close STDOUT; close STDIN; } # Wrap this in an eval{} in case MIME::Lite is missing. # Then we can have the option of setting $PREF{'disable_all_email'} # so that the site still functions, sans email. eval { require MIME::Lite; my $type = (); if($mimetype) { $type = $mimetype; } else { #my $type = $attachment_hashref ? 'multipart/mixed' : 'text/plain'; $type = $attachment_hashref ? 
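			# When no type was chosen above, fall back to multipart/mixed if there are
			# attachments, otherwise flowed ISO-8859-1 plain text.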
'multipart/mixed' : 'text/plain; charset=ISO-8859-1; format=flowed'; } my $mime_msg = MIME::Lite->new( To => $to, From => $from, Subject => $subj, Type => $type, Data => $msg ) or sub { if($die_on_error) { die "$0: error creating MIME body: $!\n"; } else { warn "$0: error creating MIME body: $!\n"; } }; if($attachment_hashref) { foreach my $key (keys %$attachment_hashref) { my $mimetype = $$attachment_hashref{$key}{mimetype}; # like 'application/x-gzip' my $filename = $$attachment_hashref{$key}{filename}; my $recommended_filename = $$attachment_hashref{$key}{recommended_filename}; $recommended_filename =~ s!^.*(\\|/)!!; # strip off any preceeding path # Attach the test file $mime_msg->attach( Type => $mimetype, Path => $filename, Filename => $recommended_filename, Disposition => 'attachment' ) or sub { if($die_on_error) { die "$0: error attaching file to email: $!\n"; } else { warn "$0: error attaching file to email: $!\n"; } }; } } $PREF{smtp_server} = enc_untaint($PREF{smtp_server}); eval { MIME::Lite->send('smtp', $PREF{'smtp_server'}, Timeout=>60); }; if($@) { if($die_on_error) { die "$0: MIME::Lite->send failed: $@\n"; } else { warn "$0: MIME::Lite->send failed: $@\n"; } } eval { $mime_msg->send; }; if($@) { if($die_on_error) { die "$0: \$mime_msg->send failed: $@\n"; } else { warn "$0: \$mime_msg->send failed: $@\n"; } } if($attachment_hashref) { foreach my $key (keys %$attachment_hashref) { unlink( $$attachment_hashref{$key}{filename} ) if $$attachment_hashref{$key}{'delete-after-sending'} eq 'yes'; } } # Perl seems to be ignoring our disable_all_email pref; I think that's because # the eval{} happens at compile-time and not runtime? # # }; if($@ && ($PREF{'disable_all_email'} =~ /no/i)) { die "$0: in send_email(): $@\n"; } # # }; if($@) # then there was a problem with MIME::Lite, so try sendmail instead. { warn "$0: error in send_email() while trying to use MIME::Lite; trying sendmail instead.\nError was:\n$@\n"; eval { open(SENDMAIL, "|$PREF{'path_to_sendmail'} -oi -t") or die "$0: Can't fork for sendmail: $!\n"; if($attachment_hashref) { print SENDMAIL qq`MIME-Version: 1.0` . qq`\nFrom: $from` . qq`\nTo: $to` . qq`\nSubject: $subj` . qq`\nContent-Type: multipart/mixed; boundary=encindboundarystring` . qq`\n` . qq`\n--encindboundarystring` . qq`\nContent-Type: text/plain` . qq`\n` . qq`\n$msg`; foreach my $key (keys %$attachment_hashref) { my $mimetype = $$attachment_hashref{$key}{mimetype}; # like 'application/x-gzip' $mimetype = 'application/octet-stream' unless $mimetype; my $filename = $$attachment_hashref{$key}{filename}; my $recommended_filename = $$attachment_hashref{$key}{recommended_filename}; $recommended_filename =~ s!^.*(\\|/)!!; # strip off any preceeding path my $atch = `uuencode $filename $filename`; # UUencode it so we can send it as an attachment print SENDMAIL qq`\n____________________` . qq`\nAttachment: $filename:` . qq`\n` . qq`\n--encindboundarystring` . qq`\nContent-Type: $mimetype; name="$filename"` . qq`\nContent-Transfer-Encoding: x-uuencode` . qq`\nContent-Disposition: attachment; filename="$recommended_filename"` . qq`\n` . qq`\n$atch` . qq`\n` . qq`\n--encindboundarystring`; } print SENDMAIL qq`\n--encindboundarystring--\n` } else # no attachment. { print SENDMAIL qq`From: $from` . qq`\nTo: $to` . qq`\nSubject: $subj` . qq`\nContent-Type: $mimetype` . qq`\n` . 
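			# The empty qq`\n` piece just above, together with the leading \n on the body
			# below, produces the blank line that separates the mail headers from the body
			# on the sendmail pipe.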
qq`\n$msg`; } close(SENDMAIL) or die "$0: sendmail didn't close nicely: $!\n"; }; if($@) { die "$0: Cannot send email; tried MIME::Lite and sendmail but both failed.\n$@\n"; } } if($do_fork) { exit; # exit the child process. } } } sub get_ip_and_host { my $ip = $ENV{REMOTE_ADDR}; my $host = $ENV{REMOTE_HOST}; if(!($host)) { $host = $ip; } if($host eq $ip) { use Socket; $host = gethostbyaddr(inet_aton($ip), AF_INET); } if(!($host)) { $host = $ip; } return ($ip, $host); } sub spawn_progress_server { my $kidpid = fork(); die "$0: spawn_progress_server() couldn't fork: $!\n" unless defined $kidpid; #print STDERR "fork must have worked...\n"; if($kidpid) { # parent return; } else { open(STDERR,">/dev/null"); #open(STDFOO,">>/tmp/STDFOO"); # child #print STDFOO "ipcd: in outer child\n"; my $sock = (); my $port = $PREF{base_port} - 1; while( $port < ($PREF{base_port} + $PREF{num_ports_to_use}) ) { $port++; my $logdata = time . ": $PREF{serial}: ipcd: spawn_progress_server(): trying to bind to port $port\n"; unless($DLOG{$logdata} || $dlog) { flock $dlog, 2; print $dlog $logdata; $DLOG{$logdata} = 1; flock $dlog, 8; } $sock = new IO::Socket::INET( LocalHost => 'localhost', LocalPort => $port, Proto => 'tcp', Listen => 20, Timeout => $PREF{progress_server_timeout}, Reuse => 1); last if $sock; } $sock or die "$0: spawn_progress_server(): couldn't bind to any socket: :$!\n"; my $logdata = time . ": $PREF{serial}: ipcd: spawn_progress_server(): bound to port $port\n"; unless($DLOG{$logdata} || $dlog) { flock $dlog, 2; print $dlog $logdata; $DLOG{$logdata} = 1; flock $dlog, 8; } #print STDFOO "ipcd: bound to port $port\n"; my($client, $msg) = (); while($client = $sock->accept()) { # got a client connection. while(defined($msg = <$client>)) { chomp $msg; if($msg =~ /^enc-get-progress-for (\d+)$/) { my $serial = $1; if($PREF{uploaddata}{$serial}{start_time}) { print $client $serial . ',' . $PREF{uploaddata}{$serial}{progress} . ',' . $PREF{uploaddata}{$serial}{currentfile} . ',' . $PREF{uploaddata}{$serial}{totalfiles} . ',' . $PREF{uploaddata}{$serial}{totalsize} . ',' . $PREF{uploaddata}{$serial}{start_time} . "\n"; } else { print $client 'ENOTMINE' . "\n"; } } elsif($msg =~ /^enc-set-progress-for (\d+): (.+),(\d+),(\d+),(\d+),(\d+)$/) { my $serial = $1; $PREF{uploaddata}{$serial}{progress} = $2; $PREF{uploaddata}{$serial}{currentfile} = $3; $PREF{uploaddata}{$serial}{totalfiles} = $4; $PREF{uploaddata}{$serial}{totalsize} = $5; $PREF{uploaddata}{$serial}{start_time} = $6; print $client "UPDATE-OK-FOR $serial\n"; } } } exit; # quit main progress_server() child. } } sub ask_progress_server($) { my $serial = shift; use IO::Socket; my ($sock,$response) = (); my $port = $PREF{base_port} - 1; while( $port < ($PREF{base_port} + $PREF{num_ports_to_use}) ) { $port++; my $logdata = time . ": $serial: ipcd: client: ask_progress_server(): trying to connect to port $port\n"; unless($DLOG{$logdata} || $dlog) { flock $dlog, 2; print $dlog $logdata; $DLOG{$logdata} = 1; flock $dlog, 8; } #print STDERR "ipcd: client: ask_progress_server(): trying to connect to port $port\n"; $sock = new IO::Socket::INET( PeerAddr => 'localhost', PeerPort => $port, Timeout => 3, Proto => 'tcp'); next unless $sock; my $logdata = time . 
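		# The progress server (see spawn_progress_server above) speaks a tiny line-based
		# protocol on a small range of localhost ports starting at $PREF{base_port}:
		#
		#   client: enc-get-progress-for <serial>
		#   server: <serial>,<progress>,<currentfile>,<totalfiles>,<totalsize>,<start_time>
		#           (or "ENOTMINE" if this server instance isn't tracking that serial)
		#
		#   client: enc-set-progress-for <serial>: <progress>,<currentfile>,<totalfiles>,<totalsize>,<start_time>
		#   server: UPDATE-OK-FOR <serial>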
": $serial: ipcd: client: ask_progress_server(): connected to port $port\n"; unless($DLOG{$logdata} || $dlog) { flock $dlog, 2; print $dlog $logdata; $DLOG{$logdata} = 1; flock $dlog, 8; } #print STDERR "ipcd: client: ask_progress_server(): connected to port $port\n"; #$sock or die "no socket :$!"; print $sock "enc-get-progress-for $serial\n"; $response = scalar <$sock>; close $sock; chomp $response; my $logdata = time . ": $serial: ipcd: client: ask_progress_server(): got response=$response\n"; unless($DLOG{$logdata} || $dlog) { flock $dlog, 2; print $dlog $logdata; $DLOG{$logdata} = 1; flock $dlog, 8; } last if $response =~ /^$serial,.+,\d+,\d+,\d+,\d+$/; } die "$0: ask_progress_server(): couldn't get response from progress server\n" unless $response; my ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = ($response =~ /^$serial,(.+),(\d+),(\d+),(\d+),(\d+)$/); return ($progress,$currentfile,$totalfiles,$totalsize,$start_time); } sub tell_progress_server($) { my $serial = shift; my $data = shift; use IO::Socket; my ($sock,$response) = (); my $port = $PREF{base_port} - 1; while( $port < ($PREF{base_port} + $PREF{num_ports_to_use}) ) { $port++; #print STDERR "ipcd: client: trying to connect to port $port\n"; $sock = new IO::Socket::INET( PeerAddr => 'localhost', PeerPort => $port, Timeout => 3, Proto => 'tcp'); next unless $sock; #print STDERR "ipcd: client: connected to port $port\n"; #$sock or die "no socket :$!"; print $sock "enc-set-progress-for ${serial}: $data\n"; $response = scalar <$sock>; close $sock; chomp $response; #my $logdata = time . ": $serial: ipcd: client: tell_progress_server(): got response=$response\n"; #unless($DLOG{$logdata} || $dlog) { flock $dlog, 2; print $dlog $logdata; $DLOG{$logdata} = 1; flock $dlog, 8; } last if $response =~ /^UPDATE-OK-FOR $serial$/; } #die "$0: ask_progress_server(): couldn't get response from progress server\n" unless $response; #my ($progress,$currentfile,$totalfiles,$totalsize,$start_time) = ($response =~ /^$serial,(\d+),(\d+),(\d+),(\d+),(\d+)$/); #return ($progress,$currentfile,$totalfiles,$totalsize,$start_time); } sub create_file_if_DNE { my $file = shift; my $mode = shift; return if -T $file; open(my $newfh,">$file") or die "$0: couldn't create new file $file: $!\n"; close $newfh or die "$0: couldn't close $file after creating it: $!\n"; if($mode) { chmod($mode,$file) or die "$0: couldn't chmod file \"$file\" with mode \"$mode\": $!\n"; } } sub create_dir_if_DNE { my $dir = shift; my $mode = shift; return if -d $dir; mkdir($dir,0777) or die "$0: couldn't create dir $dir: $!\n"; if($mode) { chmod($mode,$dir) or die "$0: couldn't chmod dir \"$dir\" with mode \"$mode\": $!\n"; } } sub delete_directory { my $dir = shift; my ($files, $subfolders) = get_items($dir); my @infofile_errors = (); # first delete the files. # foreach my $file (@$files) { $file = enc_untaint($file, 'keep_path'); # can never be too safe... 
if($file =~ /^$PREF{uploaded_files_realpath}/) { if($PREF{uploaded_files_realpath} =~ /^$PREF{DOCROOT}/) { #print STDERR "unlinking $file\n"; unlink($file) or die qq`$0: couldn't unlink (delete) file "$file": $!\n`; my ($file_with_urlpath) = ($file =~ /^$PREF{DOCROOT}(.+$)/); if(-e (my $infofile = get_info_filename_withpath($file_with_urlpath)) ) { #print STDERR "deleting $infofile\n"; unlink($infofile) or push @infofile_errors, qq`$0: couldn't unlink (delete) infofile "$infofile": $!\n`; } } else { die qq`$0: refusing to unlink "$file" because \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}) does not appear to be within \$PREF{DOCROOT} ($PREF{DOCROOT}).\n`; } } else { die qq`$0: refusing to unlink "$file" because it doesn't appear to be within \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}).\n`; } } # next delete the folders. # start with the longest pathname to ensure we delete subdirectories before parent directories. # foreach my $folder (sort { length($b) <=> length($a) } @$subfolders) { $folder = enc_untaint($folder, 'keep_path'); # can never be too safe... if($folder =~ /^$PREF{uploaded_files_realpath}/) { if($PREF{uploaded_files_realpath} =~ /^$PREF{DOCROOT}/) { #print STDERR "rmdir-ing $folder\n"; rmdir($folder) or die qq`$0: couldn't rmdir (delete) directory "$folder": $!\n`; } else { die qq`$0: refusing to rmdir "$folder" because \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}) does not appear to be within \$PREF{DOCROOT} ($PREF{DOCROOT}).\n`; } } else { die qq`$0: refusing to rmdir "$folder" because it doesn't appear to be within \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}).\n`; } } # finally, delete the requested folder itself. # # can never be too safe... if($dir =~ /^$PREF{uploaded_files_realpath}/) { $dir = enc_untaint($dir, 'keep_path'); if($PREF{uploaded_files_realpath} =~ /^$PREF{DOCROOT}/) { #print STDERR "rmdir-ing $dir\n"; rmdir($dir) or die qq`$0: couldn't rmdir (delete) directory "$dir": $!\n`; } else { die qq`$0: refusing to rmdir "$dir" because \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}) does not appear to be within \$PREF{DOCROOT} ($PREF{DOCROOT}).\n`; } } else { die qq`$0: refusing to rmdir "$dir" because it doesn't appear to be within \$PREF{uploaded_files_realpath} ($PREF{uploaded_files_realpath}).\n`; } return \@infofile_errors; } sub count_items { my $dir = shift; return scan_dir_for_contents($dir, 'return_the_item_counts'); } sub get_items { my $dir = shift; return scan_dir_for_contents($dir, 'return_the_items_themselves'); } sub scan_dir_for_contents { my $dir = shift; my $mode = shift; my @all_dirs = ($dir); my @all_subdirs = get_all_subdirs($dir); for(@all_subdirs) { push @all_dirs, $dir . '/' . $_; } my @all_files = (); foreach my $subdir (@all_dirs) { opendir(my $dirh, $subdir) or die "$0: couldn't open directory $subdir: $!\n"; my @files = grep { ! -d "$subdir/$_" } readdir($dirh); closedir $dirh or die "$0: couldn't close directory $subdir: $!\n"; for(@files) { push @all_files, $subdir . '/' . $_; } #print STDERR "subdir $subdir contains:" . join ", ", @files; } if($mode eq 'return_the_items_themselves') { my @all_subdirs_with_paths = (); for(@all_subdirs) { push @all_subdirs_with_paths, $dir . '/' . 
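	# scan_dir_for_contents() serves both get_items() and count_items():
	# 'return_the_items_themselves' returns (\@files, \@subdirs) with full paths,
	# while 'return_the_item_counts' returns (file_count, subdir_count).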
$_; } return (\@all_files, \@all_subdirs_with_paths); } elsif($mode eq 'return_the_item_counts') { return ($#all_files + 1, $#all_subdirs + 1); } } sub get_all_subdirs { my $dir = shift; opendir(my $dirh, $dir) or die "$0: couldn't open directory $dir: $!\n"; my @dirs = sort grep { -d "$dir/$_" && -w "$dir/$_" && !/^\.$/ && !/\.{2}/ } readdir($dirh); closedir $dirh or die "$0: couldn't close directory $dir: $!\n"; my @subdirs = (); # now recurse through everything below this point. foreach my $level1dir (@dirs) { foreach my $level2dir (get_all_subdirs("$dir/$level1dir")) { push @subdirs, "$level1dir/$level2dir"; } } push @dirs, @subdirs; return @dirs; } sub get_userdir { my $userdir = (); if($PREF{enable_userdir_from_cookie} =~ /yes/i) { if(my $ucname = get_cookie($PREF{userdir_cookie_name})) { $userdir = $ucname; } else { exit_with_message("Error: not logged in", qq`Perhaps you need to go home and log in first?`); } } elsif($PREF{enable_userdir_on_url} =~ /yes/i) { if($qs =~ /(?:^|&)userdir=(.+?)(?:&|$)/) { $userdir = $1; } else { if($PREF{url_without_userdir_is_error} =~ /yes/i) { exit_with_message("Error: malformed URL", qq`You need to pass userdir=yourusername on the URL.`); } # if no userdir was passed, and the webmaster doesn't want that to be # an error, then we just have to return null and use the top-level # dir for this upload. } } unless($PREF{allow_unsafe_userdir_names} =~ /yes/i) { $userdir = enc_untaint($userdir) if $userdir; } return $userdir; } sub item_is_allowed_to_be_displayed { my $item = shift; my $allowed = 1; if($PREF{only_show_files_with_these_extensions} =~ /(.+)/) { my %allowed_extensions = map { lc($_) => 1 } split(/[,\s]+/, $PREF{only_show_files_with_these_extensions}); my ($this_items_extension) = ($item =~ /.*(\..+)$/); die qq`$0: could not determine the extension for item "$item".\n` unless ($this_items_extension || $PREF{allow_files_without_extensions} =~ /yes/i); unless( $allowed_extensions{lc($this_items_extension)} ) { $allowed = 0; } } if($PREF{hide_files_with_these_extensions} =~ /(.+)/) { my %disallowed_extensions = map { lc($_) => 1 } split(/[,\s]+/, $PREF{hide_files_with_these_extensions}); my ($this_items_extension) = ($item =~ /.*(\..+)$/); die qq`$0: could not determine the extension for item "$item".\n` unless ($this_items_extension || $PREF{allow_files_without_extensions} =~ /yes/i); if( $disallowed_extensions{lc($this_items_extension)} ) { $allowed = 0; } } return $allowed; } sub clean_up_filename { s/\s+/_/g for @_; s/[^0-9A-Za-z\._-]//g for @_; } sub create_info_file { my ($filename_with_urlpath, $basename, $filesize, $serial, $comments) = @_; $filesize = format_filesize_nicely($filesize); my $infofile = get_info_filename_withpath($filename_with_urlpath); # The name of the infofile is taken from the name of the uploaded file itself, # and it gets serialized along with the name of the uploaded file. So the only # way that the infofile could already exist is if someone deleted the uploaded # file outside of FileChucker (via shell, FTP, etc) and left the infofile behind. # In that case, the infofile no longer has an uploaded file to be attached to, so # we'd want to overwrite it anyway. So don't die here if the infofile already # exists. # #die_nice(qq`Error: the infofile ($infofile) for this upload ($filename_with_urlpath) already exists.`) if -e $infofile; $comments = ($PREF{display_comments_box_for_uploads} =~ /yes/i && $PREF{store_info_about_each_upload} =~ /yes/i) ? 
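	# The comments block is only written when both display_comments_box_for_uploads
	# and store_info_about_each_upload are enabled. The infofile itself is named by
	# get_info_filename_withpath(): slashes in the file's URL-path become "-.-.-",
	# ".info.txt" is appended, and it is stored under $PREF{logpath}.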
qq`Uploader's comments:\n\n$comments\n`: undef; $comments =~ s/\r\n/\n/g; my $start_etime = $PREF{uploaddata}{$serial}{start_time}; my $start_ampm = lc(strftime("%p", localtime($start_etime))); my $start_stamp = strftime("%a%b%d,%Y,%I:%M", localtime($start_etime)).$start_ampm; my $end_etime = time; my $end_ampm = lc(strftime("%p", localtime($end_etime))); my $end_stamp = strftime("%a%b%d,%Y,%I:%M", localtime($end_etime)).$end_ampm; my $elapsed_time_in_seconds = $end_etime - $start_etime; my $elapsed_time_in_minutes = $elapsed_time_in_seconds / 60; my $elapsed_time_in_hours = $elapsed_time_in_seconds / 3600; s/(.*\.\d).*/$1/ for ($elapsed_time_in_minutes, $elapsed_time_in_hours); my ($ip,$host) = get_ip_and_host(); my $uploadsize = format_filesize_nicely($ENV{CONTENT_LENGTH}); open(my $outfh, ">$infofile") or die "$0: couldn't create infofile $infofile: $!\n"; flock $outfh, 2; seek $outfh, 0, 0; print $outfh qq`Original filename: $filename_with_urlpath` . qq`\nFile size: $filesize` . qq`\nUploaded in a group of this many files: $PREF{uploaddata}{$serial}{totalfiles}` . qq`\nTotal upload size: $uploadsize` . qq`\nUpload serial number: $serial` . qq`\n` . qq`\nUploader's IP address: $ip` . qq`\nUploader's hostname: $host` . qq`\nUploader's user-dir: ` . (get_userdir() ? get_userdir() : '(none)') . qq`\nUploader's user-agent: $ENV{HTTP_USER_AGENT}` . qq`\n` . qq`\nStart time (for entire upload): $start_stamp` . qq`\nEnd time (for entire upload): $end_stamp` . qq`\n` . qq`\nStart etime (for entire upload): $start_etime` . qq`\nEnd etime (for entire upload): $end_etime` . qq`\n` . qq`\nElapsed time in seconds (for entire upload): $elapsed_time_in_seconds` . qq`\nElapsed time in minutes (for entire upload): $elapsed_time_in_minutes` . qq`\nElapsed time in hours (for entire upload): $elapsed_time_in_hours` . qq`\n` . qq`\n$comments` . qq`\n`; truncate $outfh, tell $outfh; close $outfh or die "$0: couldn't close infofile $infofile after creating it: $!\n"; chmod 0666, $infofile; } sub format_filesize_nicely { my $size = shift; $size = 0 unless $size; $size = $size > 999999 ? onedecimal($size/(1024*1024)) . ' MB' : int($size/1024) . ' KB'; return $size; } sub remove_reserved_strings { s/-\.-\.-/_._._/g for @_; # because we use -.-.- as a directory-separation symbol in our infofiles' filenames. } sub show_fileinfo { print_needlogin_error() unless user_has_info_rights(); my $path = shift; my $file = shift; enc_urldecode($path, $file); $file = enc_untaint($file); $path = enc_untaint($path, 'keep_path') if $path; #clean_up_filename($dst) if $PREF{clean_up_filenames} =~ /yes/i; #remove_reserved_strings($dst); my $infofile = get_info_filename_withpath("$PREF{uploaded_files_urlpath}/$path$file"); die_nice(qq`Error: couldn't find info file for $path$file.`) unless -e $infofile; open(my $infh, "<$infofile") or die "$0: couldn't open infofile $infofile for reading: $!\n"; flock $infh, 1; seek $infh, 0, 0; my @contents = <$infh>; close $infh or die "$0: couldn't close infofile $infofile after reading: $!\n"; start_html_output('Upload Info', 'css', 'js'); print qq`

Upload Info

\n
\n\n`;
	my $closed_pretag_already = 0;
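	# Walk the infofile line by line. The user-agent line is special-cased (its label
	# and value are captured separately), and once the "Uploader's comments:" line is
	# reached, $closed_pretag_already is set so the remaining comment lines are printed
	# with explicit line-break separators rather than inside the preformatted section
	# that the flag's name refers to.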
	foreach my $line (@contents)
	{
		if($line =~ /^(Uploader's user-agent: )(.*)$/)
		{
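			# The "Uploader's user-agent:" label ($1) and its value ($2) are printed
			# separately below, presumably so very long user-agent strings can wrap.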
			print qq`
\n
$1$2
\n
`;
		}
		elsif($line =~ /^(Uploader's comments:)$/)
		{
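			# From here on we are in the uploader's comments; set the flag so the
			# following lines are printed with explicit breaks in the elsif branch below.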
			print qq`
\n
$1`; $closed_pretag_already = 1; } elsif($closed_pretag_already) { print '
' . $line; } else { print $line; } } print qq`\n
` if $closed_pretag_already; print qq`\n` unless $closed_pretag_already; print qq`\n\n`; print_footer_links('back', 'list'); print_powered_by(); finish_html_output(); } sub get_info_filename_withpath { my $filename = shift; my $infofile = $filename; $infofile =~ s![/\\]{2,}!/!g; $infofile =~ s![/\\]!-.-.-!g; my $slash = $PREF{logpath} =~ m!/$! ? undef : '/'; $infofile = $PREF{logpath} . $slash . $infofile . '.info.txt'; return $infofile; } sub move_all_infofiles($$$) { my $files = shift; # arrayref [ from: my ($files, $subfolders) = get_items($srcdir); ] my $srcdir_with_url = shift; my $dstdir_with_url = shift; my @errors = (); foreach my $file (@$files) { $file = enc_untaint($file, 'keep_path'); my ($file_with_urlpath) = ($file =~ /($PREF{uploaded_files_urlpath}.*$)/); #print STDERR "file_with_urlpath: $file_with_urlpath\n"; my $old_infofile = get_info_filename_withpath($file_with_urlpath); my ($file_with_old_path_removed) = ($file =~ /$srcdir_with_url(.+$)/); #print STDERR "file_with_old_path_removed: $file_with_old_path_removed\n"; my $file_with_new_path_added = $dstdir_with_url . $file_with_old_path_removed; #print STDERR "file_with_new_path_added: $file_with_new_path_added\n"; my $new_infofile = get_info_filename_withpath($file_with_new_path_added); if(-e $old_infofile) { rename($old_infofile, $new_infofile) or push @errors, qq`couldn't move infofile from "$old_infofile" to "$new_infofile": $!`; } } return \@errors; } sub show_link_to_uploads { return ( $PREF{show_link_to_uploads_for_strangers} =~ /yes/i || ($PREF{show_link_to_uploads_for_members} =~ /yes/i && user_is_logged_in()) || ($PREF{show_link_to_uploads_for_admins} =~ /yes/i && admin_is_logged_in()) ); } sub interpolate_vars_from_URL_and_cookies { my %url_vars = (); while($qs =~ /(?:^|&)(.+?)=(.*?)(?:&|$)/g) { my ($var,$value) = ($1, $2); enc_urldecode($value); $url_vars{$var} = $value; } s/\$URL\{(.+?)\}/$url_vars{$1}/g for @_; s/\$COOKIE\{(.+?)\}/get_cookie($1)/eg for @_; }