import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.KeysType;
import org.apache.doris.catalog.OlapTable;
- import org.apache.doris.cloud.proto.Cloud.ObjectStoreInfoPB;
import org.apache.doris.cloud.security.SecurityChecker;
- import org.apache.doris.cloud.storage.RemoteBase;
- import org.apache.doris.cloud.storage.RemoteBase.ObjectInfo;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.Config;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.InternalErrorCode;
import org.apache.doris.common.UserException;
import org.apache.doris.common.util.PrintableMap;
import org.apache.doris.common.util.TimeUtils;
- import org.apache.doris.datasource.property.constants.AzureProperties;
- import org.apache.doris.datasource.property.constants.S3Properties;
+ import org.apache.doris.datasource.property.storage.ObjectStorageProperties;
+ import org.apache.doris.fsv2.FileSystemFactory;
import org.apache.doris.load.EtlJobType;
import org.apache.doris.load.loadv2.LoadTask;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
+ import org.apache.doris.thrift.TFileType;

import com.google.common.base.Function;
import com.google.common.base.Joiner;
@@ -102,15 +100,11 @@ public class LoadStmt extends DdlStmt implements NotFallbackInParser {
    // deprecated, keeping this property to make LoadStmt#checkProperties() happy
    public static final String USE_NEW_LOAD_SCAN_NODE = "use_new_load_scan_node";

-     // for load data from Baidu Object Store(BOS)
+     // for load data from Baidu Object Store (BOS); TODO: waiting for new property support
    public static final String BOS_ENDPOINT = "bos_endpoint";
    public static final String BOS_ACCESSKEY = "bos_accesskey";
    public static final String BOS_SECRET_ACCESSKEY = "bos_secret_accesskey";

-     // for S3 load check
-     public static final List<String> PROVIDERS =
-             new ArrayList<>(Arrays.asList("cos", "oss", "s3", "obs", "bos", "azure"));
-
    // mini load params
    public static final String KEY_IN_PARAM_COLUMNS = "columns";
    public static final String KEY_IN_PARAM_SET = "set";
@@ -454,8 +448,6 @@ public void analyze(Analyzer analyzer) throws UserException {
                for (int i = 0; i < dataDescription.getFilePaths().size(); i++) {
                    String location = brokerDesc.getFileLocation(dataDescription.getFilePaths().get(i));
                    dataDescription.getFilePaths().set(i, location);
-                     StorageBackend.checkPath(dataDescription.getFilePaths().get(i),
-                             brokerDesc.getStorageType(), "DATA INFILE must be specified.");
                    dataDescription.getFilePaths().set(i, dataDescription.getFilePaths().get(i));
                }
            }
@@ -522,27 +514,6 @@ public void analyze(Analyzer analyzer) throws UserException {
            user = ConnectContext.get().getQualifiedUser();
        }

-
-     private String getProviderFromEndpoint() {
-         Map<String, String> properties = brokerDesc.getProperties();
-         for (Map.Entry<String, String> entry : properties.entrySet()) {
-             if (entry.getKey().equalsIgnoreCase(S3Properties.PROVIDER)) {
-                 // S3 Provider properties should be case insensitive.
-                 return entry.getValue().toUpperCase();
-             }
-         }
-         return S3Properties.S3_PROVIDER;
-     }
-
-     private String getBucketFromFilePath(String filePath) throws Exception {
-         String[] parts = filePath.split("\\/\\/");
-         if (parts.length < 2) {
-             throw new Exception("filePath is not valid");
-         }
-         String buckt = parts[1].split("\\/")[0];
-         return buckt;
-     }
-
    public String getComment() {
        return comment;
    }
@@ -630,21 +601,17 @@ private void checkEndpoint(String endpoint) throws UserException {
    }

    public void checkS3Param() throws UserException {
-         Map<String, String> brokerDescProperties = brokerDesc.getProperties();
-         if (brokerDescProperties.containsKey(S3Properties.Env.ENDPOINT)
-                 && brokerDescProperties.containsKey(S3Properties.Env.ACCESS_KEY)
-                 && brokerDescProperties.containsKey(S3Properties.Env.SECRET_KEY)
-                 && brokerDescProperties.containsKey(S3Properties.Env.REGION)) {
-             String endpoint = brokerDescProperties.get(S3Properties.Env.ENDPOINT);
-             endpoint = endpoint.replaceFirst("^http://", "");
-             endpoint = endpoint.replaceFirst("^https://", "");
-             brokerDescProperties.put(S3Properties.Env.ENDPOINT, endpoint);
+         if (brokerDesc.getFileType() != null && brokerDesc.getFileType().equals(TFileType.FILE_S3)) {
+
+             ObjectStorageProperties storageProperties = (ObjectStorageProperties) brokerDesc.getStorageProperties();
+             String endpoint = storageProperties.getEndpoint();
+             checkEndpoint(endpoint);
            checkWhiteList(endpoint);
-             if (AzureProperties.checkAzureProviderPropertyExist(brokerDescProperties)) {
-                 return;
+             // connectivity test: verify the object storage is reachable with the given properties
+             boolean connectivityTest = FileSystemFactory.get(brokerDesc.getStorageProperties()).connectivityTest();
+             if (!connectivityTest) {
+                 throw new UserException("Failed to access object storage, message=connectivity test failed");
            }
-             checkEndpoint(endpoint);
-             checkAkSk();
        }
    }

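For readers of this hunk: assembled from the added lines above, the resulting checkS3Param() reads roughly as follows. This is a minimal illustrative sketch, not a verbatim copy of the final file; the calls to checkEndpoint, checkWhiteList and FileSystemFactory.get(...).connectivityTest() are taken from the diff, while the comments and exact formatting are editorial.

public void checkS3Param() throws UserException {
    // Only object-storage (S3-style) loads are validated here; other file types skip this check.
    if (brokerDesc.getFileType() != null && brokerDesc.getFileType().equals(TFileType.FILE_S3)) {
        ObjectStorageProperties storageProperties = (ObjectStorageProperties) brokerDesc.getStorageProperties();
        String endpoint = storageProperties.getEndpoint();
        checkEndpoint(endpoint);    // existing endpoint format check
        checkWhiteList(endpoint);   // existing endpoint whitelist check
        // A single connectivity probe replaces the per-file headObject/listObjects loop
        // that the removed checkAkSk() used to run.
        boolean connectivityTest = FileSystemFactory.get(brokerDesc.getStorageProperties()).connectivityTest();
        if (!connectivityTest) {
            throw new UserException("Failed to access object storage, message=connectivity test failed");
        }
    }
}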
@@ -657,47 +624,6 @@ public void checkWhiteList(String endpoint) throws UserException {
        }
    }

-     private void checkAkSk() throws UserException {
-         RemoteBase remote = null;
-         ObjectInfo objectInfo = null;
-         String curFile = null;
-         try {
-             Map<String, String> brokerDescProperties = brokerDesc.getProperties();
-             String provider = getProviderFromEndpoint();
-             for (DataDescription dataDescription : dataDescriptions) {
-                 for (String filePath : dataDescription.getFilePaths()) {
-                     curFile = filePath;
-                     String bucket = getBucketFromFilePath(filePath);
-                     objectInfo = new ObjectInfo(ObjectStoreInfoPB.Provider.valueOf(provider.toUpperCase()),
-                             brokerDescProperties.get(S3Properties.Env.ACCESS_KEY),
-                             brokerDescProperties.get(S3Properties.Env.SECRET_KEY),
-                             bucket, brokerDescProperties.get(S3Properties.Env.ENDPOINT),
-                             brokerDescProperties.get(S3Properties.Env.REGION), "");
-                     remote = RemoteBase.newInstance(objectInfo);
-                     // RemoteBase#headObject does not throw exception if key does not exist.
-                     remote.headObject("1");
-                     remote.listObjects(null);
-                     remote.close();
-                 }
-             }
-         } catch (Exception e) {
-             LOG.warn("Failed to access object storage, file={}, proto={}, err={}", curFile, objectInfo, e.toString());
-             String msg;
-             if (e instanceof UserException) {
-                 msg = ((UserException) e).getDetailMessage();
-             } else {
-                 msg = e.getMessage();
-             }
-             throw new UserException(InternalErrorCode.GET_REMOTE_DATA_ERROR,
-                     "Failed to access object storage, message=" + msg, e);
-         } finally {
-             if (remote != null) {
-                 remote.close();
-             }
-         }
-
-     }
-
    @Override
    public StmtType stmtType() {
        return StmtType.LOAD;