')
self._cmd += ' "' + arg + '"'
else:
self._cmd += cmd_arg(arg)
- @property
- def message(self):
+ def get_server_details(self):
# Fetch the server details like hostname, port, roles etc
s = Server.query.filter_by(
id=self.sid, user_id=current_user.id
).first()
+ return s.name, s.host, s.port
+
+ @property
+ def message(self):
+ # Fetch the server details like hostname, port, roles etc
+ name, host, port = self.get_server_details()
+
return _(
"Copying table data '{0}.{1}' on database '{2}' "
"and server ({3}:{4})"
@@ -119,19 +127,39 @@ class IEMessage(IProcessDesc):
html.safe_str(self.schema),
html.safe_str(self.table),
html.safe_str(self.database),
- html.safe_str(s.host),
- html.safe_str(s.port)
+ html.safe_str(host),
+ html.safe_str(port)
)
@property
def type_desc(self):
- return _("Copying table data")
+ _type_desc = _("Import - ") if self.is_import else _("Export - ")
+ return _type_desc + _("Copying table data")
+
+ @property
+ def current_storage_dir(self):
+
+ if config.SERVER_MODE:
+ path = os.path.realpath(self.bfile)
+ if get_storage_directory() in path:
+ storage_directory = os.path.basename(get_storage_directory())
+ start = path.index(storage_directory)
+ end = start + (len(storage_directory))
+
+ last_dir = os.path.dirname(path[end:])
+ else:
+ last_dir = '\\'
+
+ else:
+ last_dir = os.path.dirname(self.bfile) \
+ if os.path.isfile(self.bfile) \
+ else self.bfile
+
+ return None if self.is_import else last_dir
def details(self, cmd, args):
# Fetch the server details like hostname, port, roles etc
- s = Server.query.filter_by(
- id=self.sid, user_id=current_user.id
- ).first()
+ name, host, port = self.get_server_details()
res = ''
res += _(
@@ -142,9 +170,9 @@ class IEMessage(IProcessDesc):
html.safe_str(self.table),
html.safe_str(self.database),
"{0} ({1}:{2})".format(
- html.safe_str(s.name),
- html.safe_str(s.host),
- html.safe_str(s.port)
+ html.safe_str(name),
+ html.safe_str(host),
+ html.safe_str(port)
)
)
@@ -304,8 +332,7 @@ def create_import_export_job(sid):
if not _file:
return bad_request(errormsg=_('Please specify a valid file'))
-
- if IS_WIN:
+ elif IS_WIN:
_file = _file.replace('\\', '/')
data['filename'] = _file
@@ -328,14 +355,22 @@ def create_import_export_job(sid):
args = ['--command', query]
try:
+
+ io_params = {
+ 'sid': sid,
+ 'schema': data['schema'],
+ 'table': data['table'],
+ 'database': data['database'],
+ 'is_import': data['is_import'],
+ 'filename': data['filename'],
+ 'storage': storage_dir,
+ 'utility': utility
+ }
+
p = BatchProcess(
desc=IEMessage(
- sid,
- data['schema'],
- data['table'],
- data['database'],
- storage_dir,
- utility, *args
+ *args,
+ **io_params
),
cmd=utility, args=args
)
diff --git a/web/pgadmin/tools/import_export/tests/test_batch_process.py b/web/pgadmin/tools/import_export/tests/test_batch_process.py
new file mode 100644
index 000000000..e93ac007a
--- /dev/null
+++ b/web/pgadmin/tools/import_export/tests/test_batch_process.py
@@ -0,0 +1,238 @@
+##########################################################################
+#
+# pgAdmin 4 - PostgreSQL Tools
+#
+# Copyright (C) 2013 - 2020, The pgAdmin Development Team
+# This software is released under the PostgreSQL Licence
+#
+##########################################################################
+
+from pgadmin.misc.bgprocess.processes import BatchProcess, IProcessDesc, \
+ current_app
+from pgadmin.tools.import_export import IEMessage
+from pgadmin.utils.route import BaseTestGenerator
+from pickle import dumps, loads
+from unittest.mock import patch, MagicMock
+
+
+class BatchProcessTest(BaseTestGenerator):
+ """Test the BatchProcess class"""
+ scenarios = [
+ ('When export file with default options',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres',
+ args=[
+ ' --command',
+ '\\copy {0}.{1} ({2},{3}) TO \'{4}\' CSV '
+ 'QUOTE {5} ESCAPE \'\'\'\';'
+ ],
+ cmd='import_export'
+ ),
+ params=dict(
+ filename='test_export_file.csv',
+ format='csv',
+ is_import=False,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='postgres',
+ columns=['test_col_1', 'test_col_2'],
+ icolumns=[],
+ schema="export_test_schema",
+ table="export_test_table",
+ storage='/'
+ ),
+ url='/import_export/job/{0}',
+ expected_cmd_opts=['--command', 'copy', 'TO',
+ 'export_test_schema', 'export_test_table'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )),
+ ('When import file with default options',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_import_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres',
+ args=[
+ ' --command',
+ '\\copy {0}.{1} ({2},{3}) FROM \'{4}\' CSV '
+ 'QUOTE {5} ESCAPE \'\'\'\';'
+ ],
+ cmd='import_export'
+ ),
+ params=dict(
+ filename='test_import_file.csv',
+ format='csv',
+ is_import=True,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='postgres',
+ columns=['test_col_1', 'test_col_2'],
+ icolumns=[],
+ schema="import_test_schema",
+ table="import_test_table",
+ storage='/'
+ ),
+ url='/import_export/job/{0}',
+ expected_cmd_opts=['--command', 'copy', 'FROM',
+ 'import_test_schema', 'import_test_table'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ ))
+ ]
+
+ @patch('pgadmin.tools.import_export.IEMessage.get_server_details')
+ @patch('pgadmin.misc.bgprocess.processes.Popen')
+ @patch('pgadmin.misc.bgprocess.processes.db')
+ @patch('pgadmin.tools.import_export.current_user')
+ @patch('pgadmin.misc.bgprocess.processes.current_user')
+ def runTest(self, current_user_mock, current_user, db_mock,
+ popen_mock, get_server_details_mock):
+ with self.app.app_context():
+ current_user.id = 1
+ current_user_mock.id = 1
+ current_app.PGADMIN_RUNTIME = False
+
+ def db_session_add_mock(j):
+ cmd_obj = loads(j.desc)
+ self.assertTrue(isinstance(cmd_obj, IProcessDesc))
+
+ self.assertEqual(cmd_obj.bfile, self.params['filename'])
+ self.assertEqual(cmd_obj.database,
+ self.class_params['database'])
+
+ command = ' "' + self.class_params['args'][0] + '"' + \
+ ' "' + '\\' + self.class_params['args'][1].format(
+ self.params['schema'],
+ self.params['table'],
+ self.params['columns'][0],
+ self.params['columns'][1],
+ self.params['filename'],
+ '\\' + self.params['quote']
+ ) + '"'
+ self.assertEqual(cmd_obj._cmd, command)
+
+ db_mock.session.add.side_effect = db_session_add_mock
+ db_mock.session.commit = MagicMock(return_value=True)
+
+ get_server_details_mock.return_value = \
+ self.class_params['name'], \
+ self.class_params['host'], \
+ self.class_params['port']
+
+ args = self.class_params['args'][1].format(
+ self.params['schema'],
+ self.params['table'],
+ self.params['columns'][0],
+ self.params['columns'][1],
+ self.params['filename'],
+ self.params['quote']
+ )
+
+ import_export_obj = IEMessage(
+ *[self.class_params['args'][0], args],
+ **{
+ 'sid': self.class_params['sid'],
+ 'schema': self.params['schema'],
+ 'table': self.params['table'],
+ 'is_import': self.params['is_import'],
+ 'database': self.params['database'],
+ 'filename': self.params['filename'],
+ 'storage': self.params['storage'],
+ }
+ )
+
+ p = BatchProcess(
+ desc=import_export_obj,
+ cmd=self.class_params['cmd'],
+ args=args
+ )
+
+ # Check that _create_process has been called
+ self.assertTrue(db_mock.session.add.called)
+
+ # Check start method
+ self._check_start(popen_mock, p, import_export_obj)
+
+ # Check list method
+ self._check_list(p, import_export_obj)
+
+ @patch('pgadmin.misc.bgprocess.processes.Process')
+ def _check_start(self, popen_mock, p, import_export_obj, process_mock):
+ class TestMockProcess():
+ def __init__(self, desc, args, cmd):
+ self.pid = 1
+ self.exit_code = 1
+ self.start_time = '2018-04-17 06:18:56.315445 +0000'
+ self.end_time = None
+ self.desc = dumps(desc)
+ self.arguments = " ".join(args)
+ self.command = cmd
+ self.acknowledge = None
+ self.process_state = 0
+
+ mock_result = process_mock.query.filter_by.return_value
+ mock_result.first.return_value = TestMockProcess(
+ import_export_obj, self.class_params['args'],
+ self.class_params['cmd'])
+
+ cmd_test = self.class_params['cmd']
+ assert_true = self.assertTrue
+
+ class PopenMockSideEffect():
+ def __init__(self, cmd, **kwargs):
+ assert_true(cmd_test in cmd)
+ assert_true('env' in kwargs)
+
+ # Need not to call the actual poll, so passing.
+ def poll(self):
+ pass
+
+ popen_mock.side_effect = PopenMockSideEffect
+ p.start()
+
+ self.assertTrue(popen_mock.called)
+
+ @patch('pgadmin.tools.import_export.get_storage_directory')
+ @patch('pgadmin.misc.bgprocess.processes.Process')
+ @patch('pgadmin.misc.bgprocess.processes.BatchProcess.'
+ 'update_process_info')
+ def _check_list(self, p, import_export_obj, update_process_info_mock,
+ process_mock, get_storage_directory_mock):
+ class TestMockProcess():
+ def __init__(self, desc, args, cmd):
+ self.pid = 1
+ self.exit_code = 1
+ self.start_time = '2018-04-17 06:18:56.315445 +0000'
+ self.end_time = None
+ self.desc = dumps(desc)
+ self.arguments = " ".join(args)
+ self.command = cmd
+ self.acknowledge = None
+ self.process_state = 0
+
+ process_mock.query.filter_by.return_value = [
+ TestMockProcess(import_export_obj,
+ self.class_params['args'],
+ self.class_params['cmd'])]
+
+ update_process_info_mock.return_value = [True, True]
+ get_storage_directory_mock.return_value = '//'
+
+ ret_value = p.list()
+ self.assertEqual(1, len(ret_value))
+ self.assertTrue('details' in ret_value[0])
+ self.assertTrue('desc' in ret_value[0])
diff --git a/web/pgadmin/tools/import_export/tests/test_create_export_job.py b/web/pgadmin/tools/import_export/tests/test_create_export_job.py
new file mode 100644
index 000000000..b7849ce16
--- /dev/null
+++ b/web/pgadmin/tools/import_export/tests/test_create_export_job.py
@@ -0,0 +1,149 @@
+##########################################################################
+#
+# pgAdmin 4 - PostgreSQL Tools
+#
+# Copyright (C) 2013 - 2020, The pgAdmin Development Team
+# This software is released under the PostgreSQL Licence
+#
+##########################################################################
+
+import os
+
+from pgadmin.utils.route import BaseTestGenerator
+from regression import parent_node_dict
+import pgadmin.tools.import_export.tests.test_import_export_utils \
+ as import_export_utils
+from pgadmin.utils import does_utility_exist
+
+from pgadmin.browser.server_groups.servers.databases.tests import utils as \
+ database_utils
+
+
+class ExportJobTest(BaseTestGenerator):
+ """Export api test cases"""
+
+ import_export_url = '/import_export/job/{0}'
+
+ scenarios = [
+ ('When exporting a table with the default options',
+ dict(
+ params=dict(
+ filename='test_import_export',
+ format='csv',
+ is_import=False,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'TO'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )
+ )),
+ ('When exporting a table with binary, encoding, delimiter, quote',
+ dict(
+ params=dict(
+ filename='test_import_export_bin',
+ format='binary',
+ is_import=False,
+ encoding="LATIN1",
+ delimiter="|",
+ quote="'",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'TO'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )
+ )),
+ ('When exporting a table with text, encoding, delimiter, quote',
+ dict(
+ params=dict(
+ filename='test_import_export_text',
+ format='text',
+ is_import=False,
+ encoding="ISO_8859_5",
+ delimiter="[tab]",
+ quote="\"",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'TO'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )
+ ))
+ ]
+
+ def setUp(self):
+
+ import_export_utils.setup_export_data(self)
+
+ self.params['database'] = self.db_name
+ self.params['schema'] = self.schema_name
+ self.params['table'] = self.table_name
+ self.params['columns'] = [self.column_name, self.column_name_1]
+
+ if 'default_binary_paths' not in self.server or \
+ self.server['default_binary_paths'] is None or \
+ self.server['type'] not in self.server['default_binary_paths'] or\
+ self.server['default_binary_paths'][self.server['type']] == '':
+ self.skipTest(
+ "default_binary_paths is not set for the server {0}".format(
+ self.server['name']
+ )
+ )
+
+ bin_p = self.server['default_binary_paths'][self.server['type']]
+
+ binary_path = os.path.join(bin_p, 'psql')
+
+ if os.name == 'nt':
+ binary_path = binary_path + '.exe'
+
+ ret_val = does_utility_exist(binary_path)
+ if ret_val is not None:
+ self.skipTest(ret_val)
+
+ def runTest(self):
+ self.server_id = parent_node_dict["server"][-1]["server_id"]
+ url = self.url.format(self.server_id)
+
+ # Create the import/export job
+ job_id = import_export_utils.create_import_export_job(self.tester,
+ url,
+ self.params,
+ self.assertEqual)
+ export_file = import_export_utils\
+ .run_import_export_job(self.tester, job_id, self.expected_params,
+ self.assertIn,
+ self.assertNotIn,
+ self.assertEqual
+ )
+
+ if export_file is not None and os.path.isfile(export_file):
+ os.remove(export_file)
+
+ def tearDown(self):
+ # Disconnect the database
+ database_utils.disconnect_database(self, self.server_id, self.db_id)
diff --git a/web/pgadmin/tools/import_export/tests/test_create_import_job.py b/web/pgadmin/tools/import_export/tests/test_create_import_job.py
new file mode 100644
index 000000000..fe2e93d3d
--- /dev/null
+++ b/web/pgadmin/tools/import_export/tests/test_create_import_job.py
@@ -0,0 +1,241 @@
+##########################################################################
+#
+# pgAdmin 4 - PostgreSQL Tools
+#
+# Copyright (C) 2013 - 2020, The pgAdmin Development Team
+# This software is released under the PostgreSQL Licence
+#
+##########################################################################
+
+import os
+
+from pgadmin.utils.route import BaseTestGenerator
+from regression import parent_node_dict
+import pgadmin.tools.import_export.tests.test_import_export_utils \
+ as import_export_utils
+from pgadmin.utils import does_utility_exist
+
+from pgadmin.browser.server_groups.servers.databases.tests import utils as \
+ database_utils
+from pgadmin.tools.import_export.tests import \
+ test_import_export_utils as io_utils
+
+
+class ImportJobTest(BaseTestGenerator):
+ """Import api test cases"""
+
+ import_export_url = '/import_export/job/{0}'
+
+ scenarios = [
+ ('When importing a table with the default options',
+ dict(
+ params=dict(
+ filename='test_import_export',
+ format='csv',
+ is_import=True,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'FROM'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ ),
+ export_options=dict(
+ params=dict(
+ filename='test_import_export',
+ format='csv',
+ is_import=False,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'TO'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )
+ )
+ )),
+ ('When importing a table with binary, encoding, delimiter, quote',
+ dict(
+ params=dict(
+ filename='test_import_export_bin',
+ format='binary',
+ is_import=True,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'FROM'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ ),
+ export_options=dict(
+ params=dict(
+ filename='test_import_export_bin',
+ format='binary',
+ is_import=False,
+ encoding="LATIN1",
+ delimiter="|",
+ quote="'",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'TO'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )
+ )
+ )),
+ ('When importing a table with text, encoding, delimiter, quote',
+ dict(
+ params=dict(
+ filename='test_import_export_text',
+ format='text',
+ is_import=True,
+ encoding="ISO_8859_5",
+ delimiter="[tab]",
+ quote="\"",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'FROM'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ ),
+ export_options=dict(
+ params=dict(
+ filename='test_import_export_text',
+ format='text',
+ is_import=False,
+ encoding="ISO_8859_5",
+ delimiter="[tab]",
+ quote="'",
+ escape="'",
+ database='',
+ columns=[],
+ icolumns=[],
+ schema="",
+ table=""
+ ),
+ url=import_export_url,
+ expected_params=dict(
+ expected_cmd_opts=['--command', 'copy', 'TO'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )
+ )
+ ))
+ ]
+
+ def setUp(self):
+
+ import_export_utils.setup_export_data(self)
+
+ self.export_options['params']['database'] = self.db_name
+ self.export_options['params']['schema'] = self.schema_name
+ self.export_options['params']['table'] = self.table_name
+ self.export_options['params']['columns'] = [self.column_name,
+ self.column_name_1]
+
+ self.params['database'] = self.db_name
+ self.params['schema'] = self.schema_name
+ self.params['table'] = self.table_name
+ self.params['columns'] = [self.column_name, self.column_name_1]
+
+ if 'default_binary_paths' not in self.server or \
+ self.server['default_binary_paths'] is None or \
+ self.server['type'] not in \
+ self.server['default_binary_paths'] or \
+ self.server['default_binary_paths'][self.server['type']] == '':
+
+ self.skipTest(
+ "default_binary_paths is not set for the server {0}".format(
+ self.server['name']
+ )
+ )
+
+ bin_p = self.server['default_binary_paths'][self.server['type']]
+
+ binary_path = os.path.join(bin_p, 'psql')
+
+ if os.name == 'nt':
+ binary_path = binary_path + '.exe'
+
+ ret_val = does_utility_exist(binary_path)
+ if ret_val is not None:
+ self.skipTest(ret_val)
+
+ def create_export(self):
+ url = self.export_options['url'].format(self.server_id)
+ job_id = io_utils.create_import_export_job(self.tester, url,
+ self.export_options[
+ 'params'],
+ self.assertEqual)
+ self.export_file = io_utils.run_import_export_job(
+ self.tester,
+ job_id,
+ self.export_options['expected_params'],
+ self.assertIn,
+ self.assertNotIn,
+ self.assertEqual
+ )
+
+ def runTest(self):
+ self.server_id = parent_node_dict["server"][-1]["server_id"]
+ url = self.url.format(self.server_id)
+
+ self.create_export()
+
+ # Create the import/export job
+ job_id = import_export_utils.create_import_export_job(self.tester,
+ url,
+ self.params,
+ self.assertEqual)
+ import_file = import_export_utils\
+ .run_import_export_job(self.tester, job_id, self.expected_params,
+ self.assertIn,
+ self.assertNotIn,
+ self.assertEqual
+ )
+
+ if import_file is not None and os.path.isfile(import_file):
+ os.remove(import_file)
+
+ def tearDown(self):
+
+ # Disconnect the database
+ database_utils.disconnect_database(self, self.server_id, self.db_id)
diff --git a/web/pgadmin/tools/import_export/tests/test_import_export_create_job_unit_test.py b/web/pgadmin/tools/import_export/tests/test_import_export_create_job_unit_test.py
new file mode 100644
index 000000000..0227b3ca8
--- /dev/null
+++ b/web/pgadmin/tools/import_export/tests/test_import_export_create_job_unit_test.py
@@ -0,0 +1,353 @@
+##########################################################################
+#
+# pgAdmin 4 - PostgreSQL Tools
+#
+# Copyright (C) 2013 - 2020, The pgAdmin Development Team
+# This software is released under the PostgreSQL Licence
+#
+##########################################################################
+
+import simplejson as json
+import os
+
+from pgadmin.utils.route import BaseTestGenerator
+from regression import parent_node_dict
+from pgadmin.utils import server_utils as server_utils, does_utility_exist
+from pgadmin.browser.server_groups.servers.databases.tests import utils as \
+ database_utils
+from unittest.mock import patch, MagicMock
+
+
+class IECreateJobTest(BaseTestGenerator):
+ """Test the IECreateJob class"""
+
+ import_export_url = '/import_export/job/{0}'
+
+ scenarios = [
+ ('When export file with default options',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres'
+ ),
+ params=dict(
+ filename='test_export_file.csv',
+ format='csv',
+ is_import=False,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='postgres',
+ columns=['test_col_1', 'test_col_2'],
+ icolumns=[],
+ schema="export_test_schema",
+ table="export_test_table"
+ ),
+ url=import_export_url,
+ expected_cmd_opts=['--command', 'copy', 'TO',
+ 'export_test_schema', 'export_test_table'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )),
+ ('When export file with csv file, header, delimiter=tab, '
+ 'encoding=LATIN1',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres'
+ ),
+ params=dict(
+ filename='test_export_file_01',
+ format="csv",
+ encoding="LATIN1",
+ header=True,
+ delimiter="[tab]",
+ quote="'",
+ escape="\"",
+ is_import=False,
+ database='postgres',
+ columns=['test_col_010', 'test_col_011'],
+ icolumns=[],
+ schema="test_schema_01",
+ table="export_test_table_01"
+ ),
+ url=import_export_url,
+ expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_01',
+ 'export_test_table_01', 'HEADER', 'DELIMITER',
+ 'LATIN1'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )),
+ ('When export file with csv file, header, delimiter=tab, '
+ 'encoding=LATIN1',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres'
+ ),
+ params=dict(
+ filename='test_export_file_01',
+ format="csv",
+ encoding="LATIN1",
+ header=True,
+ delimiter="[tab]",
+ quote="'",
+ escape="\"",
+ is_import=False,
+ database='postgres',
+ columns=['test_col_010', 'test_col_011'],
+ icolumns=[],
+ schema="test_schema_01",
+ table="export_test_table_01"
+ ),
+ url=import_export_url,
+ expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_01',
+ 'export_test_table_01', 'HEADER', 'DELIMITER',
+ 'LATIN1'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )),
+ ('When export file with binary file, oid, encoding=UTF8',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres'
+ ),
+ params=dict(
+ filename='test_export_file_02',
+ format="binary",
+ encoding="UTF8",
+ oid=True,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ is_import=False,
+ database='postgres',
+ columns=['test_col_020', 'test_col_021'],
+ icolumns=[],
+ schema="test_schema_02",
+ table="export_test_table_02"
+ ),
+ server_max_version=119999,
+ skip_msg="OIDs not supported by EPAS/PG 12.0 and above.",
+ url=import_export_url,
+ expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_02',
+ 'export_test_table_02', 'UTF8',
+ 'OIDS'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )),
+ ('When export file with text file, delimiter=|, encoding=ISO_8859_6',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres'
+ ),
+ params=dict(
+ filename='test_export_file_03',
+ format="text",
+ encoding="ISO_8859_6",
+ delimiter="|",
+ quote="\"",
+ escape="'",
+ is_import=False,
+ database='postgres',
+ columns=['test_col_030', 'test_col_031'],
+ icolumns=[],
+ schema="test_schema_03",
+ table="export_test_table_03"
+ ),
+ url=import_export_url,
+ expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_03',
+ 'export_test_table_03', 'DELIMITER',
+ 'ISO_8859_6'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )),
+ ('When export file with binary file, delimiter=tab, '
+ 'encoding=ISO_8859_6',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres'
+ ),
+ params=dict(
+ filename='test_export_file_04',
+ format="binary",
+ encoding="ISO_8859_6",
+ quote="\"",
+ escape="'",
+ is_import=False,
+ database='postgres',
+ columns=['test_col_040', 'test_col_041'],
+ icolumns=[],
+ schema="test_schema_04",
+ table="export_test_table_04"
+ ),
+ url=import_export_url,
+ expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_04',
+ 'export_test_table_04',
+ 'ISO_8859_6'],
+ not_expected_cmd_opts=['DELIMITER'],
+ expected_exit_code=[0, None]
+ )),
+ ('When import file with default options',
+ dict(
+ class_params=dict(
+ sid=1,
+ name='test_export_server',
+ port=5444,
+ host='localhost',
+ database='postgres',
+ bfile='test_export',
+ username='postgres'
+ ),
+ params=dict(
+ filename='test_import_file.csv',
+ format='csv',
+ is_import=True,
+ delimiter="",
+ quote="\"",
+ escape="'",
+ database='postgres',
+ columns=['test_col_1', 'test_col_2'],
+ icolumns=[],
+ schema="import_test_schema",
+ table="import_test_table"
+ ),
+ url=import_export_url,
+ expected_cmd_opts=['--command', 'copy', 'FROM',
+ 'import_test_schema', 'import_test_table'],
+ not_expected_cmd_opts=[],
+ expected_exit_code=[0, None]
+ )),
+ ]
+
+ def setUp(self):
+
+ if 'default_binary_paths' not in self.server or \
+ self.server['default_binary_paths'] is None or \
+ self.server['type'] not in self.server['default_binary_paths'] or \
+ self.server['default_binary_paths'][self.server['type']] == '':
+ self.skipTest(
+ "default_binary_paths is not set for the server {0}".format(
+ self.server['name']
+ )
+ )
+
+ bin_p = self.server['default_binary_paths'][self.server['type']]
+
+ binary_path = os.path.join(bin_p, 'psql')
+
+ if os.name == 'nt':
+ binary_path = binary_path + '.exe'
+
+ ret_val = does_utility_exist(binary_path)
+ if ret_val is not None:
+ self.skipTest(ret_val)
+
+ @patch('pgadmin.tools.import_export.Server')
+ @patch('pgadmin.tools.import_export.IEMessage')
+ @patch('pgadmin.tools.import_export.filename_with_file_manager_path')
+ @patch('pgadmin.tools.import_export.BatchProcess')
+ @patch('pgadmin.utils.driver.psycopg2.server_manager.ServerManager.'
+ 'export_password_env')
+ def runTest(self, export_password_env_mock, batch_process_mock,
+ filename_mock, ie_message_mock, server_mock):
+ class TestMockServer():
+ def __init__(self, name, host, port, id, username,
+ maintenance_db):
+ self.name = name
+ self.host = host
+ self.port = port
+ self.id = id
+ self.username = username
+ self.maintenance_db = maintenance_db
+
+ self.server_id = parent_node_dict["server"][-1]["server_id"]
+ mock_obj = TestMockServer(self.class_params['name'],
+ self.class_params['host'],
+ self.class_params['port'],
+ self.server_id,
+ self.class_params['username'],
+ self.class_params['database']
+ )
+ mock_result = server_mock.query.filter_by.return_value
+ mock_result.first.return_value = mock_obj
+
+ filename_mock.return_value = self.params['filename']
+
+ batch_process_mock.set_env_variables = MagicMock(
+ return_value=True
+ )
+ batch_process_mock.start = MagicMock(
+ return_value=True
+ )
+
+ export_password_env_mock.return_value = True
+
+ server_response = server_utils.connect_server(self, self.server_id)
+ if server_response["info"] == "Server connected.":
+ db_owner = server_response['data']['user']['name']
+ self.data = database_utils.get_db_data(db_owner)
+
+ if hasattr(self, 'server_max_version') \
+ and server_response["data"]["version"] > self.\
+ server_max_version:
+ self.skipTest(self.skip_msg)
+
+ url = self.url.format(self.server_id)
+
+ # Create the import/export job
+ response = self.tester.post(url,
+ data=json.dumps(self.params),
+ content_type='html/json')
+ self.assertEqual(response.status_code, 200)
+
+ self.assertTrue(ie_message_mock.called)
+ self.assertTrue(batch_process_mock.called)
+
+ if self.expected_cmd_opts:
+ for opt in self.expected_cmd_opts:
+ arg = repr(batch_process_mock.call_args_list[0][1]['args'])
+ self.assertIn(
+ opt,
+ arg
+ )
+ if self.not_expected_cmd_opts:
+ for opt in self.not_expected_cmd_opts:
+ arg = repr(batch_process_mock.call_args_list[0][1]['args'])
+ self.assertNotIn(
+ opt,
+ arg
+ )
diff --git a/web/pgadmin/tools/import_export/tests/test_import_export_message.py b/web/pgadmin/tools/import_export/tests/test_import_export_message.py
new file mode 100644
index 000000000..9b42e9641
--- /dev/null
+++ b/web/pgadmin/tools/import_export/tests/test_import_export_message.py
@@ -0,0 +1,130 @@
+##########################################################################
+#
+# pgAdmin 4 - PostgreSQL Tools
+#
+# Copyright (C) 2013 - 2020, The pgAdmin Development Team
+# This software is released under the PostgreSQL Licence
+#
+##########################################################################
+
+from pgadmin.tools.import_export import IEMessage
+from pgadmin.utils.route import BaseTestGenerator
+from unittest.mock import patch
+import config
+
+
+class IEMessageTest(BaseTestGenerator):
+ """Test the IEMessage class"""
+ scenarios = [
+ ('When Export table with default options',
+ dict(
+ class_params=dict(
+ sid=1,
+ schema='public',
+ name='test_export',
+ is_import=False,
+ port=5444,
+ host='localhost',
+ database='postgres',
+ server='postgres x',
+ filename='/test_export_file.csv',
+ storage='/',
+ table='test_table',
+ cmd="/test_path",
+ args=[
+ '--command',
+ '\\copy public.test_table (m_id) TO '
+ '\'/test_path/text_export.csv\' CSV '
+ 'QUOTE \'"\' ESCAPE \'\'\'\';'
+ ]
+ ),
+ expected_msg="Copying table data '{0}.{1}' on "
+ "database '{2}' and server ({3}:{4})",
+ expected_storage_dir='/'
+
+ )),
+ ('When Export table with folder path',
+ dict(
+ class_params=dict(
+ sid=1,
+ schema='public',
+ name='test_export',
+ is_import=False,
+ port=5444,
+ host='localhost',
+ database='postgres',
+ server='postgres x',
+ filename='/test_path/test_export_file.csv',
+ storage='/',
+ table='test_table',
+ cmd="/test_path",
+ args=[
+ '--command',
+ '\\copy public.test_table (m_id) TO '
+ '\'/test_path/text_export.csv\' CSV '
+ 'QUOTE \'"\' ESCAPE \'\'\'\';'
+ ]
+ ),
+ expected_msg="Copying table data '{0}.{1}' on "
+ "database '{2}' and server ({3}:{4})",
+ expected_storage_dir='/test_path'
+
+ )),
+ ]
+
+ @patch('pgadmin.tools.import_export.get_storage_directory')
+ @patch('pgadmin.tools.import_export.IEMessage.get_server_details')
+ @patch('os.path.realpath')
+ def runTest(self, realpath_mock, get_server_details_mock,
+ get_storage_directory_mock):
+
+ name = self.class_params['name']
+ host = self.class_params['host']
+ port = self.class_params['port']
+
+ get_server_details_mock.return_value = name, host, port
+
+ get_storage_directory_mock.return_value = '//'
+ realpath_mock.return_value = self.class_params['filename']
+
+ import_export_obj = IEMessage(
+ *self.class_params['args'],
+ **{
+ 'sid': self.class_params['sid'],
+ 'schema': self.class_params['schema'],
+ 'table': self.class_params['table'],
+ 'is_import': self.class_params['is_import'],
+ 'database': self.class_params['database'],
+ 'filename': self.class_params['filename'],
+ 'storage': self.class_params['storage'],
+ }
+ )
+
+ expected_msg = self.expected_msg.format(
+ self.class_params['schema'],
+ self.class_params['table'],
+ self.class_params['database'],
+ self.class_params['host'],
+ self.class_params['port']
+ )
+
+ # Check the expected message returned
+ self.assertEqual(import_export_obj.message, expected_msg)
+
+ # Check the command
+ obj_details = import_export_obj.details(self.class_params['cmd'],
+ self.class_params['args'])
+
+ self.assertIn(self.class_params['schema'], obj_details)
+ self.assertIn(self.class_params['table'], obj_details)
+ self.assertIn(self.class_params['database'], obj_details)
+ self.assertIn(self.class_params['host'], obj_details)
+ self.assertIn(str(self.class_params['port']), obj_details)
+
+ if config.SERVER_MODE is False:
+ self.skipTest(
+ "Skipping tests for Storage manager in Desktop mode."
+ )
+ else:
+ storage_dir = import_export_obj.current_storage_dir
+ self.assertEqual(self.expected_storage_dir, storage_dir)
diff --git a/web/pgadmin/tools/import_export/tests/test_import_export_utils.py b/web/pgadmin/tools/import_export/tests/test_import_export_utils.py
new file mode 100644
index 000000000..e69c9526a
--- /dev/null
+++ b/web/pgadmin/tools/import_export/tests/test_import_export_utils.py
@@ -0,0 +1,194 @@
+##########################################################################
+#
+# pgAdmin 4 - PostgreSQL Tools
+#
+# Copyright (C) 2013 - 2020, The pgAdmin Development Team
+# This software is released under the PostgreSQL Licence
+#
+##########################################################################
+
+import time
+import random
+import simplejson as json
+import uuid
+import re
+
+from regression import parent_node_dict
+
+from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
+ import utils as tables_utils
+from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
+ utils as schema_utils
+from pgadmin.browser.server_groups.servers.databases.tests import utils as \
+ database_utils
+from regression.python_test_utils import test_utils as utils
+from pgadmin.browser.server_groups.servers.databases.schemas.\
+ tables.columns.tests import utils as columns_utils
+
+
+def create_import_export_job(tester, url, params, assert_equal):
+ # Create the import/export job
+ response = tester.post(url,
+ data=json.dumps(params),
+ content_type='html/json')
+ assert_equal(response.status_code, 200)
+ response_data = json.loads(response.data.decode('utf-8'))
+ job_id = response_data['data']['job_id']
+ return job_id
+
+
+def run_import_export_job(tester, job_id, expected_params, assert_in,
+ assert_not_in, assert_equal):
+ cnt = 0
+ the_process = None
+ while True:
+ if cnt >= 5:
+ break
+ # Check the process list
+ response1 = tester.get('/misc/bgprocess/?_={0}'.format(
+ random.randint(1, 9999999)))
+ assert_equal(response1.status_code, 200)
+ process_list = json.loads(response1.data.decode('utf-8'))
+
+ try:
+ the_process = next(
+ p for p in process_list if p['id'] == job_id)
+ except Exception:
+ the_process = None
+
+ if the_process and 'execution_time' in the_process:
+ break
+ time.sleep(0.5)
+ cnt += 1
+
+ assert_equal('execution_time' in the_process, True)
+ assert_equal('stime' in the_process, True)
+ assert_equal('exit_code' in the_process, True)
+ assert_equal(the_process['exit_code'] in expected_params[
+ 'expected_exit_code'
+ ], True)
+
+ io_file = None
+ if 'details' in the_process:
+ io_det = the_process['details']
+
+ temp_io_det = io_det.upper()
+
+ if temp_io_det.find(' TO ') > 0:
+ io_file = temp_io_det[temp_io_det.find(' TO ') + 3:].split(' ')[1]
+ else:
+ from_find = temp_io_det.find(' FROM ') + 5
+ io_file = temp_io_det[from_find:].split(' ')[1]
+
+ if expected_params['expected_cmd_opts']:
+ for opt in expected_params['expected_cmd_opts']:
+ assert_in(opt, the_process['details'])
+ if expected_params['not_expected_cmd_opts']:
+ for opt in expected_params['not_expected_cmd_opts']:
+ assert_not_in(opt, the_process['details'])
+
+ # Check the process details
+ p_details = tester.get('/misc/bgprocess/{0}?_={1}'.format(
+ job_id, random.randint(1, 9999999))
+ )
+ assert_equal(p_details.status_code, 200)
+
+ p_details = tester.get('/misc/bgprocess/{0}/{1}/{2}/?_={3}'.format(
+ job_id, 0, 0, random.randint(1, 9999999))
+ )
+ assert_equal(p_details.status_code, 200)
+ p_details_data = json.loads(p_details.data.decode('utf-8'))
+
+ cnt = 0
+ # Retrieve the io job process logs
+ while True:
+ out, err, status = get_params(p_details_data)
+ if status or cnt >= 5:
+ break
+
+ p_details = tester.get(
+ '/misc/bgprocess/{0}/{1}/{2}/?_={3}'.format(
+ job_id, out, err, random.randint(1, 9999999))
+ )
+ assert_equal(p_details.status_code, 200)
+ p_details_data = json.loads(p_details.data.decode('utf-8'))
+
+ cnt += 1
+ time.sleep(1)
+
+ # Check the job is complete.
+ io_ack = tester.put('/misc/bgprocess/{0}'.format(job_id))
+ assert_equal(io_ack.status_code, 200)
+ io_ack_res = json.loads(io_ack.data.decode('utf-8'))
+
+ assert_equal(io_ack_res['success'], 1)
+
+ return io_file
+
+
+def get_params(data):
+ out = 0
+ out_done = False
+ err = 0
+ err_done = False
+ if 'out' in data:
+ out = data['out'] and data['out']['pos']
+
+ if 'done' in data['out']:
+ out_done = data['out']['done']
+
+ if 'err' in data:
+ err = data['err'] and data['err']['pos']
+
+ if 'done' in data['err']:
+ err_done = data['err']['done']
+
+ return out, err, (out_done and err_done)
+
+
+def setup_export_data(sobject):
+ # Create db connection
+ sobject.db_name = parent_node_dict["database"][-1]["db_name"]
+
+ schema_info = parent_node_dict["schema"][-1]
+ sobject.server_id = schema_info["server_id"]
+ sobject.db_id = schema_info["db_id"]
+ db_con = database_utils.connect_database(sobject, utils.SERVER_GROUP,
+ sobject.server_id, sobject.db_id)
+ if not db_con['data']["connected"]:
+ raise Exception("Could not connect to database to add a table.")
+
+ # Create schema
+ sobject.schema_id = schema_info["schema_id"]
+ sobject.schema_name = schema_info["schema_name"]
+ schema_response = schema_utils.verify_schemas(sobject.server,
+ sobject.db_name,
+ sobject.schema_name)
+
+ if not schema_response:
+ raise Exception("Could not find the schema to add a table.")
+
+ # Create table
+ sobject.table_name = "table_to_export_%s" % (str(uuid.uuid4())[1:8])
+ sobject.table_id = tables_utils.create_table(sobject.server,
+ sobject.db_name,
+ sobject.schema_name,
+ sobject.table_name)
+
+ # Create column
+ sobject.column_name = "column_to_export_%s" % (str(uuid.uuid4())[1:8])
+ sobject.column_id = columns_utils.create_column(sobject.server,
+ sobject.db_name,
+ sobject.schema_name,
+ sobject.table_name,
+ sobject.column_name)
+
+ # Create column
+ sobject.column_name_1 = "column_to_export_%s" % (str(uuid.uuid4())[1:8])
+ sobject.column_id_1 = columns_utils.create_column(sobject.server,
+ sobject.db_name,
+ sobject.schema_name,
+ sobject.table_name,
+ sobject.column_name_1)
+
+ return None
diff --git a/web/pgadmin/tools/storage_manager/__init__.py b/web/pgadmin/tools/storage_manager/__init__.py
new file mode 100644
index 000000000..1185375de
--- /dev/null
+++ b/web/pgadmin/tools/storage_manager/__init__.py
@@ -0,0 +1,75 @@
+##########################################################################
+#
+# pgAdmin 4 - PostgreSQL Tools
+#
+# Copyright (C) 2013 - 2020, The pgAdmin Development Team
+# This software is released under the PostgreSQL Licence
+#
+##########################################################################
+
+"""A blueprint module implementing the storage manager functionality"""
+
+import simplejson as json
+import os
+
+from flask import url_for, Response, render_template, request, current_app
+from flask_babelex import gettext as _
+from flask_security import login_required, current_user
+from pgadmin.misc.bgprocess.processes import BatchProcess, IProcessDesc
+from pgadmin.utils import PgAdminModule, get_storage_directory, html, \
+ fs_short_path, document_dir, IS_WIN, does_utility_exist
+from pgadmin.utils.ajax import make_json_response, bad_request
+
+from config import PG_DEFAULT_DRIVER
+from pgadmin.model import Server
+from pgadmin.utils.constants import MIMETYPE_APP_JS
+
+MODULE_NAME = 'storage_manager'
+
+
+class StorageManagerModule(PgAdminModule):
+ """
+ class StorageManagerModule(PgAdminModule)
+
+ A module class for manipulating file operation which is derived from
+ PgAdminModule.
+ """
+
+ LABEL = _('Storage Manager')
+
+ def get_own_javascripts(self):
+ """"
+ Returns:
+ list: js files used by this module
+ """
+ scripts = list()
+ for name, script in [
+ ['pgadmin.tools.storage_manager', 'js/storage_manager']
+ ]:
+ scripts.append({
+ 'name': name,
+ 'path': url_for('storage_manager.index') + script,
+ 'when': None
+ })
+
+ return scripts
+
+
+blueprint = StorageManagerModule(MODULE_NAME, __name__)
+
+
+@blueprint.route("/")
+@login_required
+def index():
+ return bad_request(errormsg=_("This URL cannot be called directly."))
+
+
+@blueprint.route("/js/storage_manager.js")
+@login_required
+def script():
+ """render the import/export javascript file"""
+ return Response(
+ response=render_template("storage_manager/js/storage_manager.js", _=_),
+ status=200,
+ mimetype=MIMETYPE_APP_JS
+ )
diff --git a/web/pgadmin/tools/storage_manager/static/js/storage_manager.js b/web/pgadmin/tools/storage_manager/static/js/storage_manager.js
new file mode 100644
index 000000000..c52019067
--- /dev/null
+++ b/web/pgadmin/tools/storage_manager/static/js/storage_manager.js
@@ -0,0 +1,93 @@
+/////////////////////////////////////////////////////////////
+//
+// pgAdmin 4 - PostgreSQL Tools
+//
+// Copyright (C) 2013 - 2020, The pgAdmin Development Team
+// This software is released under the PostgreSQL Licence
+//
+//////////////////////////////////////////////////////////////
+import { set_last_traversed_dir, getTransId } from '../../../../misc/file_manager/static/js/helpers';
+
+define([
+ 'sources/gettext', 'sources/url_for', 'jquery', 'underscore', 'pgadmin.alertifyjs',
+ 'sources/pgadmin', 'pgadmin.browser', 'sources/csrf', 'pgadmin.file_manager',
+], function (
+ gettext, url_for, $, _, alertify, pgAdmin, pgBrowser, csrfToken
+) {
+
+ pgAdmin = pgAdmin || window.pgAdmin || {};
+ var isServerMode = (function() { return pgAdmin.server_mode == 'True'; })();
+
+ var pgTools = pgAdmin.Tools = pgAdmin.Tools || {};
+
+ if(!isServerMode) {
+ return;
+ }
+
+ // Return back, this has been called more than once
+ if (pgAdmin.Tools.storage_manager)
+ return pgAdmin.Tools.storage_manager;
+
+ pgTools.storage_manager = {
+ init: function () {
+ // We do not want to initialize the module multiple times.
+ if (this.initialized)
+ return;
+
+ this.initialized = true;
+ csrfToken.setPGCSRFToken(pgAdmin.csrf_token_header, pgAdmin.csrf_token);
+
+ var storage_manager = this.callback_storage_manager.bind(this);
+
+ pgBrowser.Events.on(
+ 'pgadmin:tools:storage_manager', storage_manager
+ );
+
+ // Define the nodes on which the menus to be appear
+ var menus = [{
+ name: 'storage_manager',
+ module: this,
+ applies: ['tools'],
+ callback: 'callback_storage_manager',
+ priority: 2,
+ label: gettext('Storage Manager'),
+ enable: true,
+ }];
+
+ pgBrowser.add_menus(menus);
+ },
+
+ /*
+ Open the dialog for the storage functionality
+ */
+ callback_storage_manager: function (path) {
+
+ var params = {
+ supported_types: ['sql', 'csv', '*'],
+ dialog_type: 'storage_dialog',
+ dialog_title: 'Storage Manager',
+ btn_primary: undefined,
+ };
+
+ if (!_.isUndefined(path) && !_.isNull(path) && !_.isEmpty(path)) {
+
+ var transId = getTransId(JSON.stringify(params));
+ var t_res;
+ if (transId.readyState == 4) {
+ t_res = JSON.parse(transId.responseText);
+ }
+ var trans_id = _.isUndefined(t_res) ? 0 : t_res.data.fileTransId;
+
+ set_last_traversed_dir({'path': path}, trans_id);
+ pgAdmin.FileManager.init();
+ pgAdmin.FileManager.show_dialog(params);
+ }
+ else {
+ pgAdmin.FileManager.init();
+ pgAdmin.FileManager.show_dialog(params);
+ }
+ },
+ };
+
+ return pgAdmin.Tools.storage_manager;
+});
diff --git a/web/regression/javascript/file_manager/file_manager_specs.js b/web/regression/javascript/file_manager/file_manager_specs.js
index 0485ee2cb..83cb0f397 100644
--- a/web/regression/javascript/file_manager/file_manager_specs.js
+++ b/web/regression/javascript/file_manager/file_manager_specs.js
@@ -100,4 +100,22 @@ describe('fileSelectDialog', function () {
expect(Alertify.createModeDlg).toHaveBeenCalled();
});
});
+
+ describe('When dialog is called for storage file', () => {
+ it('Storage file dialog', function() {
+ params = {
+ 'dialog_title': 'Storage Manager',
+ 'dialog_type': 'storage_dialog',
+ };
+
+ spyOn(Alertify, 'fileStorageDlg').and.callFake(function() {
+ this.resizeTo = function() {};
+ return this;
+ });
+
+ pgAdmin.FileManager.show_dialog(params);
+
+ expect(Alertify.fileStorageDlg).toHaveBeenCalled();
+ });
+ });
});
diff --git a/web/webpack.config.js b/web/webpack.config.js
index a50588556..c9ddf80c2 100644
--- a/web/webpack.config.js
+++ b/web/webpack.config.js
@@ -500,6 +500,7 @@ module.exports = [{
',pgadmin.tools.debugger.direct' +
',pgadmin.node.pga_job' +
',pgadmin.tools.schema_diff' +
+ ',pgadmin.tools.storage_manager' +
',pgadmin.tools.search_objects',
},
}, {
diff --git a/web/webpack.shim.js b/web/webpack.shim.js
index da19b6897..155fb9eaf 100644
--- a/web/webpack.shim.js
+++ b/web/webpack.shim.js
@@ -274,6 +274,7 @@ var webpackShimConfig = {
'pgadmin.tools.schema_diff': path.join(__dirname, './pgadmin/tools/schema_diff/static/js/schema_diff'),
'pgadmin.tools.schema_diff_ui': path.join(__dirname, './pgadmin/tools/schema_diff/static/js/schema_diff_ui'),
'pgadmin.tools.search_objects': path.join(__dirname, './pgadmin/tools/search_objects/static/js/search_objects'),
+ 'pgadmin.tools.storage_manager': path.join(__dirname, './pgadmin/tools/storage_manager/static/js/storage_manager'),
'pgadmin.search_objects': path.join(__dirname, './pgadmin/tools/search_objects/static/js'),
'pgadmin.tools.user_management': path.join(__dirname, './pgadmin/tools/user_management/static/js/user_management'),
'pgadmin.user_management.current_user': '/user_management/current_user',