feat: use the new buckets

The tarballs are currently getting put into /bundles, but maybe this
isn't necessary
pull/1480/head
Oliver Eyton-Williams 4 years ago
parent c6264eb1bf
commit 7713074c93
No known key found for this signature in database
GPG Key ID: C4B00673186643C5

@@ -191,7 +191,7 @@ class DocsCLI < Thor
puts '[S3] Begin syncing.' puts '[S3] Begin syncing.'
docs.each do |doc| docs.each do |doc|
puts "[S3] Syncing #{doc.path}..." puts "[S3] Syncing #{doc.path}..."
cmd = "aws s3 sync #{File.join(Docs.store_path, doc.path)} s3://devdocs-assets/#{doc.path} --delete --profile devdocs" cmd = "aws s3 sync #{File.join(Docs.store_path, doc.path)} s3://devdocs-staging-documents/#{doc.path} --delete --profile devdocs"
cmd << ' --dryrun' if options[:dryrun] cmd << ' --dryrun' if options[:dryrun]
system(cmd) system(cmd)
end end
@@ -199,20 +199,16 @@ class DocsCLI < Thor
# Upload packages to dl.devdocs.io (used by the "thor docs:download" command) # Upload packages to dl.devdocs.io (used by the "thor docs:download" command)
# TODO(MIGRATION): replace this with an S3 bucket upload. # TODO(MIGRATION): replace this with an S3 bucket upload.
puts '[MaxCDN] Begin uploading.' puts '[S3 bundle] Begin uploading.'
Net::SFTP.start('ftp.devdocs-dl.devdocs.netdna-cdn.com', ENV['DEVDOCS_DL_USERNAME'], password: ENV['DEVDOCS_DL_PASSWORD']) do |sftp|
docs.each do |doc| docs.each do |doc|
filename = "#{doc.path}.tar.gz" filename = "#{doc.path}.tar.gz"
print "[MaxCDN] Uploading #{filename}..." print "[S3 bundle] Uploading #{filename}..."
if options[:dryrun] cmd = "aws s3 cp #{File.join(Docs.store_path, filename)} s3://devdocs-staging-downloads/bundles/#{filename} --profile devdocs"
print "\n" cmd << ' --dryrun' if options[:dryrun]
else system(cmd)
sftp.upload! File.join(Docs.store_path, filename), File.join('', 'public_html', filename)
print " OK\n"
end
end
end end
puts '[MaxCDN] Done uploading.' puts '[S3 bundle] Done uploading.'
end end
desc 'commit', '[private]' desc 'commit', '[private]'
@@ -245,7 +241,7 @@ class DocsCLI < Thor
FileUtils.mkpath(dir) FileUtils.mkpath(dir)
['index.json', 'meta.json'].each do |filename| ['index.json', 'meta.json'].each do |filename|
json = "https://docs.devdocs.io/#{doc.path}/#{filename}?#{time}" json = "https://documents.devdocs.in/#{doc.path}/#{filename}?#{time}"
begin begin
open(json) do |file| open(json) do |file|
mutex.synchronize do mutex.synchronize do
@@ -342,7 +338,7 @@ class DocsCLI < Thor
def download_doc(doc) def download_doc(doc)
target_path = File.join(Docs.store_path, doc.path) target_path = File.join(Docs.store_path, doc.path)
open "http://dl.devdocs.io/#{doc.path}.tar.gz" do |file| open "https://downloads.devdocs.in/bundles/#{doc.path}.tar.gz" do |file|
FileUtils.mkpath(target_path) FileUtils.mkpath(target_path)
file.close file.close
tar = UnixUtils.gunzip(file.path) tar = UnixUtils.gunzip(file.path)

Loading…
Cancel
Save