Vivien committed
Commit 7a848b2 · Parent(s): 74074fa
Switch to VIT-L/14
- app.py +5 -5
- embeddings.npy +2 -2
- embeddings2.npy +2 -2
    	
app.py
CHANGED

@@ -15,15 +15,15 @@ from st_clickable_images import clickable_images
         },
     )
 def load():
-    model = CLIPModel.from_pretrained("openai/clip-vit-
-    processor = CLIPProcessor.from_pretrained("openai/clip-vit-
+    model = CLIPModel.from_pretrained("openai/clip-vit-large-patch14")
+    processor = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14")
     df = {0: pd.read_csv("data.csv"), 1: pd.read_csv("data2.csv")}
     embeddings = {0: np.load("embeddings.npy"), 1: np.load("embeddings2.npy")}
     for k in [0, 1]:
-        embeddings[k] = embeddings[k] - np.mean(embeddings[k], axis=0)
         embeddings[k] = embeddings[k] / np.linalg.norm(
             embeddings[k], axis=1, keepdims=True
         )
+        embeddings[k] = embeddings[k] - np.mean(embeddings[k], axis=0)
     return model, processor, df, embeddings

@@ -46,7 +46,7 @@ def image_search(query, corpus, n_results=24):
         else:
             return np.concatenate((e1, e2), axis=0)

-    splitted_query = query.split("
+    splitted_query = query.split(" EXCLUDING ")

     positive_queries = splitted_query[0].split(";")
     for positive_query in positive_queries:

@@ -104,7 +104,7 @@ description = """
 howto = """
 - Click on an image to use it as a query and find similar images
 - Several queries, including one based on an image, can be combined (use "**;**" as a separator)
-- If the input includes "
+- If the input includes "**EXCLUDING**", the part right of it will be used as a negative query
 """
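For readers skimming the diff, here is a minimal sketch of what the two behavioral changes in app.py amount to: embeddings are now L2-normalized first and mean-centered afterwards, and " EXCLUDING " is the new separator for the negative part of a query. The toy array and example query below are illustrative assumptions, not data from the app.

```python
import numpy as np

# Toy stand-in for one of the precomputed image-embedding matrices
# (rows = images, columns = CLIP embedding dimensions).
embeddings = np.random.rand(6, 4).astype(np.float32)

# Order after this commit: normalize each row to unit length first,
# then subtract the per-dimension mean of the normalized rows
# (previously the mean was subtracted before normalization).
embeddings = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
embeddings = embeddings - np.mean(embeddings, axis=0)

# New separator: the text right of " EXCLUDING " is treated as a negative
# query; the text left of it holds the positive queries, still ";"-separated.
query = "a red car; a sunset EXCLUDING cartoon"
splitted_query = query.split(" EXCLUDING ")
positive_queries = splitted_query[0].split(";")
negative_query = splitted_query[1] if len(splitted_query) > 1 else None
print(positive_queries, negative_query)
```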
    	
embeddings.npy
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:64515f7d3d71137e2944f2c3d72c8df3e684b5d6a6ff7dcebb92370f7326ccfd
+size 76800128
    	
embeddings2.npy
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:3d730b33e758c2648419a96ac86d39516c59795e613c35700d3a64079e5a9a27
+size 25098368
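A rough sanity check on the new LFS pointer sizes, assuming float32 arrays with the standard 128-byte .npy header and ViT-L/14's 768-dimensional image embeddings; the row counts below are inferred from the byte sizes, not stated anywhere in the commit.

```python
# Inferred, not from the repo: relate the new .npy byte sizes to
# 768-dimensional float32 embeddings with a 128-byte .npy header.
HEADER_BYTES = 128   # typical .npy v1.0 header size
DIM = 768            # CLIP ViT-L/14 projection dimension
ITEMSIZE = 4         # bytes per float32

for name, size in [("embeddings.npy", 76_800_128), ("embeddings2.npy", 25_098_368)]:
    rows = (size - HEADER_BYTES) // (DIM * ITEMSIZE)
    print(f"{name}: {rows} embeddings")  # 25000 and 8170
```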
