From ac4fd31d4a5d8f20742a89582571ad8ad88937f8 Mon Sep 17 00:00:00 2001
From: Miguel Angel Reina Ortega <miguelangel.reinaortega@etsi.org>
Date: Tue, 28 Nov 2023 07:54:01 +0100
Subject: [PATCH] Adding job to remove links from the web pages (needs to be
 run from web after deleting a tag)

---
 .gitlab-ci.yml      | 24 ++++++++++++++++++++++--
 publish_on_pages.sh | 14 ++++++++------
 updateIndex.py      | 36 +++++++++++++++++++++++++-----------
 3 files changed, 55 insertions(+), 19 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 9ff93c7..c860d89 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -80,8 +80,8 @@ Publish spec:
 pages:
   stage: web
   when: on_success
-  only:
-    - tags
+  rules:
+    - if: $CLEAN_WEB_PAGES == 'false' || $CLEAN_WEB_PAGES == 'true' && $CI_COMMIT_TAG
   before_script:
     - |
       curl "${CI_API_V4_URL}/projects/$TOOLS_SCRIPTS_PROJECT_ID/repository/files/publish_on_pages%2Esh/raw?ref=master" >> publish_on_pages.sh
@@ -93,6 +93,26 @@ pages:
   script:
     - echo 'Publishing on pages'
     - ./publish_on_pages.sh updateIndex.py $CI_PAGES_URL ${CI_PROJECT_NAME} $CI_COMMIT_REF_NAME index.html $CLEAN_WEB_PAGES
+  artifacts:
+    paths:
+      - public
+
+pages:
+  stage: web
+  when: on_success
+  rules:
+    - if: $CLEAN_WEB_PAGES != "false" && $CLEAN_WEB_PAGES != "true" && $CI_PIPELINE_SOURCE == "web"
+  before_script:
+    - |
+      curl "${CI_API_V4_URL}/projects/$TOOLS_SCRIPTS_PROJECT_ID/repository/files/publish_on_pages%2Esh/raw?ref=master" >> publish_on_pages.sh
+    - chmod +x publish_on_pages.sh
+    - |
+      curl "${CI_API_V4_URL}/projects/$TOOLS_SCRIPTS_PROJECT_ID/repository/files/updateIndex%2Epy/raw?ref=master" >> updateIndex.py
+    - |
+      curl "${CI_API_V4_URL}/projects/$TOOLS_SCRIPTS_PROJECT_ID/repository/files/index%2Ehtml/raw?ref=master" >> index.html
+  script:
+    - echo 'Publishing on pages'
+    - ./publish_on_pages.sh updateIndex.py $CI_PAGES_URL ${CI_PROJECT_NAME} $CLEAN_WEB_PAGES index.html $CLEAN_WEB_PAGES
   artifacts:
     paths:
       - public
\ No newline at end of file
diff --git a/publish_on_pages.sh b/publish_on_pages.sh
index 2aa41d2..2f6a269 100644
--- a/publish_on_pages.sh
+++ b/publish_on_pages.sh
@@ -18,15 +18,17 @@ echo "------ Add/update content --------"
 if [ $6 == 'true' ]; then
   echo 'Removing all web pages content...'
   rm -r public/*;
-else
+elif [ $6 == 'false' ]; then
   ls public/
+  mkdir -p "public/$4"
+  cp -r -f "$3_$4.docx" public/$4
+  docker run --rm -v $(pwd):/tmp -w /tmp "$DOCKER_IMAGE" pip install bs4 && python3 $1 "$3_$4" "$4/$3_$4.docx" "$5"
+else
+  echo "Removing entry for $6..."
+  rm -r -f public/$6
+  docker run --rm -v $(pwd):/tmp -w /tmp "$DOCKER_IMAGE" pip install bs4 && python3 $1 -re "$3_$6" "$4/$3_$4.docx" "$5"
 fi

-mkdir -p "public/$4"
-cp -r -f "$3_$4.docx" public/$4
-
-docker run --rm -v $(pwd):/tmp -w /tmp "$DOCKER_IMAGE" pip install bs4 && python3 $1 "$3_$4" "$4/$3_$4.docx" "$5"
-
 echo "------ Zip the content and publish the zip again ------"
 zip -r "content.zip" "public"
 mv "content.zip" "public/"
diff --git a/updateIndex.py b/updateIndex.py
index acef650..e71cc58 100644
--- a/updateIndex.py
+++ b/updateIndex.py
@@ -24,17 +24,29 @@ def updateIndex(args:argparse.Namespace) -> None:
     # Analyze index content with BeautifulSoup
     soup = BeautifulSoup(html_content, 'html.parser')

-    # Find document list
-    # Assume there's a list (<ul>) with id "document-list"
-    document_list = soup.find('ul', id='document-list')
-
-    # Add the new element <li> for the new document
-    if args.documentLink is not None and args.documentName is not None:
-        li = soup.new_tag('li')
-        a = soup.new_tag('a', href=args.documentLink)
-        a.string = args.documentName
-        li.append(a)
-        document_list.append(li)
+    if args.removeEntry:
+        # Remove the element <li> for the document
+        if args.documentLink is not None and args.documentName is not None:
+            # Find the element to delete
+            element_to_delete = soup.find('li', text=args.documentName)
+            # Check if the element is found before attempting to delete it
+            if element_to_delete:
+                # Remove the element from the HTML tree
+                element_to_delete.decompose()
+            else:
+                print("Element not found.")
+
+    else:
+        # Find document list
+        # Assume there's a list (<ul>) with id "document-list"
+        document_list = soup.find('ul', id='document-list')
+        # Add the new element <li> for the new document
+        if args.documentLink is not None and args.documentName is not None:
+            li = soup.new_tag('li')
+            a = soup.new_tag('a', href=args.documentLink)
+            a.string = args.documentName
+            li.append(a)
+            document_list.append(li)

     # Save the updated index.html file
     with open('public/index.html', 'w', encoding='utf-8') as file:
@@ -48,6 +60,8 @@ if __name__ == '__main__':

     # Parse command line arguments
     parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('-re', '--removeEntry', action='store_true', required=False, default=False,
+                        help="Indicate to remove the given entry")
     parser.add_argument('documentName', help = 'Document name to add to the list')
     parser.add_argument('documentLink', help = 'Document location to add to the list')
     parser.add_argument('indexTemplate', help = 'Index template document')
--
GitLab
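
For illustration only (not part of the patch): a minimal, self-contained sketch of what the
new -re (removeEntry) path in updateIndex.py is expected to do, so the removal logic can be
tried outside the pipeline. The document name, link and HTML snippet are made up, and string=
is the current BeautifulSoup name for the text= argument used in the patch.

    # Standalone sketch of the removeEntry behaviour; names and paths are illustrative.
    from bs4 import BeautifulSoup

    html = (
        '<ul id="document-list">'
        '<li><a href="v1.2.3/myspec_v1.2.3.docx">myspec_v1.2.3</a></li>'
        '<li><a href="v1.2.4/myspec_v1.2.4.docx">myspec_v1.2.4</a></li>'
        '</ul>'
    )
    soup = BeautifulSoup(html, 'html.parser')

    document_name = 'myspec_v1.2.3'                # entry for the tag that was deleted
    entry = soup.find('li', string=document_name)  # <li> whose text is the document name
    if entry:
        entry.decompose()                          # drop the whole <li> from the tree
    else:
        print("Element not found.")

    print(soup)  # the list now only contains myspec_v1.2.4

In the pipeline, this path is reached by running the new web-triggered pages job with
CLEAN_WEB_PAGES set to the deleted tag, which ends up calling roughly
python3 updateIndex.py -re "<project>_<tag>" "<tag>/<project>_<tag>.docx" index.html
from publish_on_pages.sh.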