[Top][All Lists]
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[Getht-commit] arch commit: getht--mainline--0.1--patch-46
From: |
Nick |
Subject: |
[Getht-commit] arch commit: getht--mainline--0.1--patch-46 |
Date: |
Fri, 2 May 2008 15:33:05 +0100 |
User-agent: |
Mutt/1.5.16 (2007-06-09) |
Revision: getht--mainline--0.1--patch-46
Archive: address@hidden
Creator: Nick White <address@hidden>
Date: Fri May 2 15:32:27 BST 2008
Standard-date: 2008-05-02 14:32:27 GMT
Modified-files: src/download.c src/getht.c
New-patches: address@hidden/getht--mainline--0.1--patch-46
Summary: Fixed resume bug, improved download output
Keywords:
Fixed bug in resume which caused forced redownloads to create invalid pdfs
Changed download output to ensure one line per download (rather than a separate
one for progress)
Changed download output to display name of file rather than url downloading
Removed obsolete testfile variable
Renamed variable filename to filepath
M src/download.c
M src/getht.c
* modified files
--- orig/src/download.c
+++ mod/src/download.c
@@ -43,17 +43,18 @@
extern char proxy_pass[STR_MAX];
extern CURL *main_curl_handle;
-int save_file(CURL *curl_handle, char *uri, char *filepath, long resume_offset)
+int save_file(CURL *curl_handle, char *uri, char *filepath, char *filetitle,
long resume_offset)
/* Save the file *uri to *filepath */
{
- printf("Downloading %s\n",uri);
+ printf("Downloading %s ",filetitle);
+ fflush(stdout);
if(!curl_handle)
curl_handle = main_curl_handle;
if(curl_handle) {
FILE *file;
- file = fopen(filepath, "a");
+ file = fopen(filepath, resume_offset?"a":"w");
if(!file)
{
fprintf(stderr,"Error: cannot open file %s for
writing.\n",filepath);
@@ -100,7 +101,7 @@
fclose(file);
- printf("\n");
+ printf("\rDownloaded %s \n",filetitle);
}
else {
fprintf(stderr,"Error: curl failed to initialise.\n");
@@ -126,7 +127,7 @@
else
frac = 0;
- printf("\rDownload progress: %3.0lf%% ", frac);
+ printf("\b\b\b\b\b\b\b: %3.0lf%% ", frac);
fflush(stdout);
return 0;
@@ -185,12 +186,12 @@
sec * cur_section;
char newdir[STR_MAX];
char filename[STR_MAX];
- FILE * testfile;
+ char filepath[STR_MAX];
snprintf(newdir,STR_MAX,"%s/%i_%i-%i",directory,
issue->date.year,issue->date.firstmonth,issue->date.lastmonth);
- printf("Downloading issue entitled '%s' to '%s'\n",issue->title,newdir);
+ printf("Downloading %s to %s\n",issue->title, newdir);
if(!opendir(newdir))
if(mkdir(newdir, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH |
S_IXOTH))
@@ -204,12 +205,13 @@
{
cur_section = issue->section[count];
- snprintf(filename,STR_MAX,"%s/section_%i.pdf", newdir,
cur_section->number);
+ snprintf(filename,STR_MAX,"section_%i.pdf",
cur_section->number);
+ snprintf(filepath,STR_MAX,"%s/%s", newdir, filename);
if(!force){
struct stat fileinfo;
/* see if local file exists */
- if(stat(filename, &fileinfo))
- save_file(curl_handle, cur_section->uri,
filename, 0);
+ if(stat(filepath, &fileinfo))
+ save_file(curl_handle, cur_section->uri,
filepath, filename, 0);
else
{
/* get size of local file */
@@ -222,12 +224,12 @@
/* if size of local file != size of remote
file, resume */
if(remotesize > 0 && localsize < remotesize)
- save_file(curl_handle,
cur_section->uri, filename, localsize);
+ save_file(curl_handle,
cur_section->uri, filepath, filename, localsize);
else
printf("Skipping download of completed
section %i\n", cur_section->number);
}
}
else
- save_file(curl_handle, cur_section->uri, filename, 0);
+ save_file(curl_handle, cur_section->uri, filepath,
filename, 0);
}
}
--- orig/src/getht.c
+++ mod/src/getht.c
@@ -203,7 +203,7 @@
int update_contents_files()
/* Returns 0 on success, 1 on failure */
{
- if(save_file(NULL, issue_uri, issue_xml, 0))
+ if(save_file(NULL, issue_uri, issue_xml, "contents", 0))
return 1;
else
return 0;
[Prev in Thread] |
Current Thread |
[Next in Thread] |
- [Getht-commit] arch commit: getht--mainline--0.1--patch-46,
Nick <=