[myexperiment-hackers] [3415] trunk: Filtering and tests for components API


From: noreply
Subject: [myexperiment-hackers] [3415] trunk: Filtering and tests for components API
Date: Mon, 11 Feb 2013 16:00:24 +0000 (UTC)

Revision: 3415
Author: fbacall
Date: 2013-02-11 16:00:22 +0000 (Mon, 11 Feb 2013)

Log Message

Filtering and tests for components API

Modified Paths

    trunk/app/controllers/application_controller.rb
    trunk/lib/rest.rb
    trunk/test/fixtures/content_blobs.yml
    trunk/test/fixtures/contributions.yml

Added Paths

    trunk/lib/pivoting.rb
    trunk/test/fixtures/files/image_to_tiff_migration.t2flow

Diff

Modified: trunk/app/controllers/application_controller.rb (3414 => 3415)


--- trunk/app/controllers/application_controller.rb	2013-02-11 13:57:40 UTC (rev 3414)
+++ trunk/app/controllers/application_controller.rb	2013-02-11 16:00:22 UTC (rev 3415)
@@ -6,7 +6,10 @@
 # Filters added to this controller apply to all controllers in the application.
 # Likewise, all the methods added will be available for all controllers.
 
+require 'pivoting'
+
 class ApplicationController < ActionController::Base
+
   helper :all # include all helpers, all the time
   protect_from_forgery # See ActionController::RequestForgeryProtection for details
 
@@ -428,593 +431,6 @@
     send_file(file_name, *opts)
   end
 
-  # Pivot code
-
-  def calculate_pivot(opts = {})
-
-    begin
-      expr = parse_filter_expression(opts[:params]["filter"], opts[:pivot_options], :active_filters => opts[:active_filters])
-    rescue Exception => ex
-      problem = "Problem with query expression: #{ex}"
-    end
-
-    pivot = contributions_list(opts[:params], opts[:user], opts[:pivot_options],
-        :model            => opts[:model],
-        :auth_type        => opts[:auth_type],
-        :auth_id          => opts[:auth_id],
-        :group_by         => opts[:group_by],
-        :active_filters   => opts[:active_filters],
-        :lock_filter      => opts[:locked_filters],
-        :search_models    => opts[:search_models],
-        :search_limit     => opts[:search_limit],
-        :filters          => expr)
-
-    [pivot, problem]
-  end
-  
-  TOKEN_UNKNOWN         = 0x0000
-  TOKEN_AND             = 0x0001
-  TOKEN_OR              = 0x0002
-  TOKEN_WORD            = 0x0003
-  TOKEN_OPEN            = 0x0004
-  TOKEN_CLOSE           = 0x0005
-  TOKEN_STRING          = 0x0006
-  TOKEN_EOS             = 0x00ff
-
-  NUM_TOKENS            = 6
-
-  STATE_INITIAL         = 0x0000
-  STATE_EXPECT_OPEN     = 0x0100
-  STATE_EXPECT_STR      = 0x0200
-  STATE_EXPECT_EXPR_END = 0x0300
-  STATE_EXPECT_END      = 0x0400
-  STATE_COMPLETE        = 0x0500
-
-  def parse_filter_expression(expr, pivot_options, opts = {})
-
-    def unescape_string(str)
-      str.match(/^"(.*)"$/)[1].gsub(/\\"/, '"')
-    end
-
-    return nil if expr.nil?
-
-    state  = STATE_INITIAL
-    data   = []
-
-    begin
-
-      tokens = expr.match(/^
-
-          \s* (\sAND\s)         | # AND operator
-          \s* (\sOR\s)          | # OR operator
-          \s* (\w+)             | # a non-keyword word
-          \s* (\()              | # an open parenthesis
-          \s* (\))              | # a close parenthesis
-          \s* ("(\\.|[^\\"])*")   # double quoted string with backslash escapes
-
-          /ix)
-
-      if tokens.nil?
-        token = TOKEN_UNKNOWN
-      else
-        (1..NUM_TOKENS).each do |i|
-          token = i if tokens[i]
-        end
-      end
-
-      if token == TOKEN_UNKNOWN
-        token = TOKEN_EOS if expr.strip.empty?
-      end
-
-      case state | token
-        when STATE_INITIAL         | TOKEN_WORD   : state = STATE_EXPECT_OPEN     ; data << { :name => tokens[0], :expr => [] }
-        when STATE_EXPECT_OPEN     | TOKEN_OPEN   : state = STATE_EXPECT_STR
-        when STATE_EXPECT_STR      | TOKEN_STRING : state = STATE_EXPECT_EXPR_END ; data.last[:expr] << tokens[0] 
-        when STATE_EXPECT_EXPR_END | TOKEN_AND    : state = STATE_EXPECT_STR      ; data.last[:expr] << :and 
-        when STATE_EXPECT_EXPR_END | TOKEN_OR     : state = STATE_EXPECT_STR      ; data.last[:expr] << :or 
-        when STATE_EXPECT_EXPR_END | TOKEN_CLOSE  : state = STATE_EXPECT_END
-        when STATE_EXPECT_END      | TOKEN_AND    : state = STATE_INITIAL         ; data << :and 
-        when STATE_EXPECT_END      | TOKEN_OR     : state = STATE_INITIAL         ; data << :or 
-        when STATE_EXPECT_END      | TOKEN_EOS    : state = STATE_COMPLETE
-
-        else raise "Error parsing query expression"
-      end
-
-      expr = tokens.post_match unless state == STATE_COMPLETE
-
-    end while state != STATE_COMPLETE
-
-    # validate and reduce expressions to current capabilities
-
-    valid_filters = pivot_options["filters"].map do |f| f["query_option"] end
-    valid_filters = valid_filters.select do |f| opts[:active_filters].include?(f) end
-
-    data.each do |category|
-      case category
-      when :or
-        raise "Unsupported query _expression_"
-      when :and
-        # Fine
-      else
-        raise "Unknown filter category" unless valid_filters.include?(category[:name])
-
-        counts = { :and => 0, :or => 0 }
-
-        category[:expr].each do |bit|
-          counts[bit] = counts[bit] + 1 if bit.class == Symbol
-        end
-
-        raise "Unsupported query _expression_" if counts[:and] > 0 && counts[:or] > 0
-
-        # haven't implemented 'and' within a particular filter yet
-        raise "Unsupported query _expression_" if counts[:and] > 0
-
-        if category[:expr].length == 1
-          category[:expr] = { :terms => [unescape_string(category[:expr].first)] }
-        else
-          category[:expr] = {
-            :operator => category[:expr][1],
-            :terms    => category[:expr].select do |t|
-              t.class == String
-            end.map do |t|
-              unescape_string(t)
-            end
-          }
-        end
-      end
-    end
-
-    data
-  end
-
-  def contributions_list(params = nil, user = nil, pivot_options = nil, opts = {})
-
-    def escape_sql(str)
-      str.gsub(/\\/, '\&\&').gsub(/'/, "''")
-    end
-
-    def build_url(params, opts, expr, parts, pivot_options, extra = {})
-
-      query = {}
-
-      if parts.include?(:filter)
-        bits = []
-        pivot_options["filters"].each do |filter|
-          if !opts[:lock_filter] || opts[:lock_filter][filter["query_option"]].nil?
-            if find_filter(expr, filter["query_option"])
-              bits << filter["query_option"] + "(\"" + find_filter(expr, filter["query_option"])[:expr][:terms].map do |t| t.gsub(/"/, '\"') end.join("\" OR \"") + "\")"
-            end
-          end
-        end
-
-        if bits.length > 0
-          query["filter"] = bits.join(" AND ")
-        end
-      end
-
-      query["query"]        = params[:query]        if params[:query]
-      query["order"]        = params[:order]        if parts.include?(:order)
-      query["filter_query"] = params[:filter_query] if parts.include?(:filter_query)
-
-      query.merge!(extra)
-
-      query
-    end
-
-    def comparison(lhs, rhs)
-      if rhs.length == 1
-        "#{lhs} = '#{escape_sql(rhs.first)}'"
-      else
-        "#{lhs} IN ('#{rhs.map do |bit| escape_sql(bit) end.join("', '")}')"
-      end
-    end
-
-    def create_search_results_table(search_query, opts)
-
-      begin
-        solr_results = opts[:search_models].first.multi_solr_search(search_query,
-            :models         => opts[:search_models],
-            :limit          => opts[:search_limit],
-            :results_format => :ids)
-      rescue
-        return false
-      end
-
-      conn = ActiveRecord::Base.connection
-
-      conn.execute("CREATE TEMPORARY TABLE search_results (id INT AUTO_INCREMENT UNIQUE KEY, result_type VARCHAR(255), result_id INT)")
-
-      # This next part converts the search results to SQL values
-      #
-      # from:  { "id" => "Workflow:4" }, { "id" => "Pack:6" }, ...
-      # to:    "(NULL, 'Workflow', '4'), (NULL, 'Pack', '6'), ..."
-
-      if solr_results.results.length > 0
-        insert_part = solr_results.results.map do |result|
-          "(NULL, " + result["id"].split(":").map do |bit|
-            "'#{bit}'"
-          end.join(", ") + ")"
-        end.join(", ")
- 
-        conn.execute("INSERT INTO search_results VALUES #{insert_part}")
-      end
-
-      true
-    end
-
-    def drop_search_results_table
-      ActiveRecord::Base.connection.execute("DROP TABLE IF EXISTS search_results")
-    end
-
-    def column(column, opts)
-      if column == :auth_type
-        opts[:auth_type]
-      else
-        column
-      end
-    end
-
-    def calculate_filter(collection, params, filter, pivot_options, user, opts = {})
-
-      # apply all the joins and conditions except for the current filter
-
-      joins      = []
-      conditions = []
-
-      pivot_options["filters"].each do |other_filter|
-        if filter_list = find_filter(opts[:filters], other_filter["query_option"])
-          unless opts[:inhibit_other_conditions]
-            conditions << comparison(column(other_filter["id_column"], opts), filter_list[:expr][:terms]) unless other_filter == filter
-          end
-          joins += other_filter["joins"] if other_filter["joins"]
-        end
-      end
-
-      filter_id_column    = column(filter["id_column"],    opts)
-      filter_label_column = column(filter["label_column"], opts)
-
-      joins += filter["joins"] if filter["joins"]
-      conditions << "#{filter_id_column} IS NOT NULL" if filter["not_null"]
-
-      unless opts[:inhibit_filter_query]
-        if params[:filter_query]
-          conditions << "(#{filter_label_column} LIKE '%#{escape_sql(params[:filter_query])}%')"
-        end
-      end
-
-      current = find_filter(opts[:filters], filter["query_option"]) ? find_filter(opts[:filters], filter["query_option"])[:expr][:terms] : []
-
-      if opts[:ids].nil?
-        limit = 10
-      else
-        conditions << "(#{filter_id_column} IN ('#{opts[:ids].map do |id| escape_sql(id) end.join("','")}'))"
-        limit = nil
-      end
-
-      conditions = conditions.length.zero? ? nil : conditions.join(" AND ")
-
-      count_expr = "COUNT(DISTINCT #{opts[:auth_type]}, #{opts[:auth_id]})"
-
-      objects = collection.find(
-          :all,
-          :select => "#{filter_id_column} AS filter_id, #{filter_label_column} AS filter_label, #{count_expr} AS filter_count",
-          :joins => merge_joins(joins, pivot_options, collection.permission_conditions, :auth_type => opts[:auth_type], :auth_id => opts[:auth_id]),
-          :conditions => conditions,
-          :group => "#{filter_id_column}",
-          :limit => limit,
-          :order => "#{count_expr} DESC, #{filter_label_column}")
-
-      objects = objects.select do |x| !x[:filter_id].nil? end
-
-      objects = objects.map do |object|
-
-        value = object.filter_id.to_s
-        selected = current.include?(value)
-
-        label_expr = deep_clone(opts[:filters])
-        label_expr -= [find_filter(label_expr, filter["query_option"])] if find_filter(label_expr, filter["query_option"])
-
-        unless selected && current.length == 1
-          label_expr << { :name => filter["query_option"], :expr => { :terms => [value] } }
-        end
-
-        checkbox_expr = deep_clone(opts[:filters])
-
-        if expr_filter = find_filter(checkbox_expr, filter["query_option"])
-
-          if selected
-            expr_filter[:expr][:terms] -= [value]
-          else
-            expr_filter[:expr][:terms] += [value]
-          end
-
-          checkbox_expr -= [expr_filter] if expr_filter[:expr][:terms].empty?
-
-        else
-          checkbox_expr << { :name => filter["query_option"], :expr => { :terms => [value] } }
-        end
-
-        label_uri = build_url(params, opts, label_expr, [:filter, :order], pivot_options, "page" => nil)
-
-        checkbox_uri = build_url(params, opts, checkbox_expr, [:filter, :order], pivot_options, "page" => nil)
-
-        label = object.filter_label.clone
-        label = visible_name(label) if filter["visible_name"]
-        label = label.capitalize    if filter["capitalize"]
-
-        plain_label = object.filter_label
-
-        if params[:filter_query]
-          label.sub!(Regexp.new("(#{params[:filter_query]})", Regexp::IGNORECASE), '<b>\1</b>')
-        end
-
-        {
-          :object       => object,
-          :value        => value,
-          :label        => label,
-          :plain_label  => plain_label,
-          :count        => object.filter_count,
-          :checkbox_uri => checkbox_uri,
-          :label_uri    => label_uri,
-          :selected     => selected
-        }
-      end
-
-      [current, objects]
-    end
-
-    def calculate_filters(collection, params, opts, pivot_options, user)
-
-      # produce the filter list
-
-      filters = deep_clone(pivot_options["filters"])
-      cancel_filter_query_url = nil
-
-      filters.each do |filter|
-
-        # calculate the top n items of the list
-
-        filter[:current], filter[:objects] = calculate_filter(collection, params, filter, pivot_options, user, opts)
-
-        # calculate which active filters are missing (because they weren't in the
-        # top part of the list or have a count of zero)
-
-        missing_filter_ids = filter[:current] - filter[:objects].map do |ob| ob[:value] end
-
-        if missing_filter_ids.length > 0
-          filter[:objects] += calculate_filter(collection, params, filter, pivot_options, user, opts.merge(:ids => missing_filter_ids))[1]
-        end
-
-        # calculate which active filters are still missing (because they have a
-        # count of zero)
-
-        missing_filter_ids = filter[:current] - filter[:objects].map do |ob| ob[:value] end
-        
-        if missing_filter_ids.length > 0
-          zero_list = calculate_filter(collection, params, filter, pivot_options, user, opts.merge(:ids => missing_filter_ids, :inhibit_other_conditions => true))[1]
-
-          zero_list.each do |x| x[:count] = 0 end
-
-          zero_list.sort! do |a, b| a[:label] <=> b[:label] end
-
-          filter[:objects] += zero_list
-        end
-      end
-
-      [filters, cancel_filter_query_url]
-    end
-
-    def find_filter(filters, name)
-      filters.find do |f|
-        f[:name] == name
-      end
-    end
-
-    def merge_joins(joins, pivot_options, permission_conditions, opts = {})
-      if joins.length.zero?
-        nil
-      else
-        joins.uniq.map do |j|
-          text = pivot_options["joins"][j].clone
-          text.gsub!(/RESULT_TYPE/,         opts[:auth_type])
-          text.gsub!(/RESULT_ID/,           opts[:auth_id])
-          text.gsub!(/VIEW_CONDITIONS/,     permission_conditions[:view_conditions])
-          text.gsub!(/DOWNLOAD_CONDITIONS/, permission_conditions[:download_conditions])
-          text.gsub!(/EDIT_CONDITIONS/,     permission_conditions[:edit_conditions])
-          text
-        end.join(" ")
-      end
-    end
-
-    pivot_options["filters"] = pivot_options["filters"].select do |f|
-      opts[:active_filters].include?(f["query_option"])
-    end
-
-    joins      = []
-    conditions = []
-
-    # parse the filter expression if provided.  convert filter expression to
-    # the old format.  this will need to be replaced eventually
-
-    opts[:filters] ||= []
-    
-    include_reset_url = opts[:filters].length > 0
-
-    # filter out top level logic operators for now
-
-    opts[:filters] = opts[:filters].select do |bit|
-      bit.class == Hash
-    end
-
-    # apply locked filters
-
-    if opts[:lock_filter]
-      opts[:lock_filter].each do |filter, value|
-        opts[:filters] << { :name => filter, :expr => { :terms => [value] } }
-      end
-    end
-
-    # perform search if requested
-
-    query_problem = false
-
-    if params["query"]
-      drop_search_results_table
-      if !create_search_results_table(params["query"], opts)
-        params["query"] = nil
-        query_problem = true
-      end
-    end
-
-    if params[:query]
-      klass     = SearchResult
-      auth_type = "search_results.result_type"
-      auth_id   = "search_results.result_id"
-      group_by  = "search_results.result_type, search_results.result_id"
-    else
-      klass     = opts[:model]     || Contribution
-      auth_type = opts[:auth_type] || "contributions.contributable_type"
-      auth_id   = opts[:auth_id]   || "contributions.contributable_id"
-      group_by  = opts[:group_by]  || "contributions.contributable_type, contributions.contributable_id"
-    end
-
-    # determine joins, conditions and order for the main results
-
-    pivot_options["filters"].each do |filter|
-      if filter_list = find_filter(opts[:filters], filter["query_option"])
-        conditions << comparison(column(filter["id_column"], opts.merge( { :auth_type => auth_type, :auth_id => auth_id } )), filter_list[:expr][:terms])
-        joins += filter["joins"] if filter["joins"]
-      end
-    end
-
-    order_options = pivot_options["order"].find do |x|
-      x["option"] == params[:order]
-    end
-
-    order_options ||= pivot_options["order"].first
-
-    joins += order_options["joins"] if order_options["joins"]
-
-    having_bits = []
-
-#   pivot_options["filters"].each do |filter|
-#     if params["and_#{filter["query_option"]}"]
-#       having_bits << "GROUP_CONCAT(DISTINCT #{filter["id_column"]} ORDER BY #{filter["id_column"]}) = \"#{escape_sql(opts[:filters][filter["query_option"]])}\""
-#     end
-#   end
-
-    having_clause = ""
-
-    if having_bits.length > 0
-      having_clause = "HAVING #{having_bits.join(' AND ')}"
-    end
-
-    # perform the results query
-
-    collection = Authorization.scoped(klass,
-        :authorised_user => user,
-        :include_permissions => true,
-        :auth_type => auth_type,
-        :auth_id => auth_id)
-
-    results = collection.find(
-        :all,
-        :page => { :size => params["num"] ? params["num"].to_i : nil, :current => params["page"] },
-        :joins => merge_joins(joins, pivot_options, collection.permission_conditions, :auth_type => auth_type, :auth_id => auth_id),
-        :conditions => conditions.length.zero? ? nil : conditions.join(" AND "),
-        :group => "#{group_by} #{having_clause}",
-        :order => order_options["order"])
-        
-    # produce a query hash to match the current filters
-
-    opts[:filter_params] = {}
-
-    pivot_options["filters"].each do |filter|
-      if params[filter["query_option"]]
-        next if opts[:lock_filter] && opts[:lock_filter][filter["query_option"]]
-        opts[:filter_params][filter["query_option"]] = params[filter["query_option"]]
-      end
-    end
-
-    # produce the filter list
-
-    opts_for_filter_query = opts.merge( { :auth_type => auth_type,
-        :auth_id => auth_id, :group_by => group_by } )
-
-    filters, cancel_filter_query_url = calculate_filters(collection, params, opts_for_filter_query, pivot_options, user)
-
-    # produce the summary.  If a filter query is specified, then we need to
-    # recalculate the filters without the query to get all of them.
-
-    if params[:filter_query]
-      filters2 = calculate_filters(collection, params, opts_for_filter_query.merge( { :inhibit_filter_query => true } ), pivot_options, user)[0]
-    else
-      filters2 = filters
-    end
-
-    summary = ""
-
-    filters2.select do |filter|
-
-      next if opts[:lock_filter] && opts[:lock_filter][filter["query_option"]]
-
-      selected = filter[:objects].select do |x| x[:selected] end
-      current  = selected.map do |x| x[:value] end
-
-      if selected.length > 0
-        selected_labels = selected.map do |x|
-
-          expr = deep_clone(opts[:filters])
-
-          f = find_filter(expr, filter["query_option"])
-  
-          expr -= f[:expr][:terms] -= [x[:value]]
-          expr -= [f] if f[:expr][:terms].empty?
-
-          x[:plain_label] + ' <a href="' + url_for(build_url(params, opts, expr,
-          [:filter, :filter_query, :order], pivot_options)) +
-            '">' + " <img src='' /></a>"
-
-        end
-
-        bits = selected_labels.map do |label| label end.join(" <i>or</i> ")
-
-        summary << '<span class="filter-in-use"><b>' + filter["title"].capitalize + "</b>: " + bits + "</span> "
-      end
-    end
-
-    if params[:filter_query]
-      cancel_filter_query_url = build_url(params, opts, opts[:filters], [:filter, :order], pivot_options)
-    end
-
-    if include_reset_url
-      reset_filters_url = build_url(params, opts, opts[:filters], [:order], pivot_options)
-    end
-
-    # remove filters that do not help in narrowing down the result set
-
-    filters = filters.select do |filter|
-      if filter[:objects].empty?
-        false
-      elsif opts[:lock_filter] && opts[:lock_filter][filter["query_option"]]
-        false
-      else
-        true
-      end
-    end
-
-    {
-      :results                 => results,
-      :filters                 => filters,
-      :reset_filters_url       => reset_filters_url,
-      :cancel_filter_query_url => cancel_filter_query_url,
-      :filter_query_url        => build_url(params, opts, opts[:filters], [:filter], pivot_options),
-      :summary                 => summary,
-      :pivot_options           => pivot_options,
-      :query_problem           => query_problem
-    }
-  end
-
   #Applies the layout for the Network with the given network_id to the object (contributable)
   def update_layout(object,network_id)
     if object.is_a?(Policy)

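The pivot code removed above is re-added in lib/pivoting.rb below. Its entry point for query strings is parse_filter_expression, whose token/state machine accepts one or more FILTER("term" OR "term") groups joined by AND, i.e. the same strings that build_url emits for the "filter" parameter. A minimal sketch of that syntax and the structure it reduces to; the filter ids are illustrative, TAG_ID and USER_ID are taken from the active filter names used in lib/rest.rb, and lib/ is assumed to be on the load path:

    require 'pivoting'

    # Only "query_option" is consulted by the parser; real entries in
    # Conf.pivot_options also carry id_column, joins, etc.
    pivot_options = { "filters" => [{ "query_option" => "TAG_ID" },
                                    { "query_option" => "USER_ID" }] }

    expr = 'TAG_ID("123" OR "456") AND USER_ID("7")'

    parsed = parse_filter_expression(expr, pivot_options,
                                     :active_filters => ["TAG_ID", "USER_ID"])

    # parsed is roughly:
    #   [ { :name => "TAG_ID",  :expr => { :operator => :or, :terms => ["123", "456"] } },
    #     :and,
    #     { :name => "USER_ID", :expr => { :terms => ["7"] } } ]
    #
    # OR between whole groups, or AND inside a single group, raises
    # "Unsupported query expression".
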
Added: trunk/lib/pivoting.rb (0 => 3415)


--- trunk/lib/pivoting.rb	                        (rev 0)
+++ trunk/lib/pivoting.rb	2013-02-11 16:00:22 UTC (rev 3415)
@@ -0,0 +1,603 @@
+# Pivot code
+
+TOKEN_UNKNOWN         = 0x0000
+TOKEN_AND             = 0x0001
+TOKEN_OR              = 0x0002
+TOKEN_WORD            = 0x0003
+TOKEN_OPEN            = 0x0004
+TOKEN_CLOSE           = 0x0005
+TOKEN_STRING          = 0x0006
+TOKEN_EOS             = 0x00ff
+
+NUM_TOKENS            = 6
+
+STATE_INITIAL         = 0x0000
+STATE_EXPECT_OPEN     = 0x0100
+STATE_EXPECT_STR      = 0x0200
+STATE_EXPECT_EXPR_END = 0x0300
+STATE_EXPECT_END      = 0x0400
+STATE_COMPLETE        = 0x0500
+
+def calculate_pivot(opts = {})
+
+  begin
+    expr = parse_filter_expression(opts[:params]["filter"], opts[:pivot_options], :active_filters => opts[:active_filters])
+  rescue Exception => ex
+    problem = "Problem with query expression: #{ex}"
+  end
+
+  pivot = contributions_list(opts[:params], opts[:user], opts[:pivot_options],
+                             :model            => opts[:model],
+                             :auth_type        => opts[:auth_type],
+                             :auth_id          => opts[:auth_id],
+                             :group_by         => opts[:group_by],
+                             :active_filters   => opts[:active_filters],
+                             :lock_filter      => opts[:locked_filters],
+                             :search_models    => opts[:search_models],
+                             :search_limit     => opts[:search_limit],
+                             :no_pagination    => opts[:no_pagination],
+                             :filters          => expr)
+
+  [pivot, problem]
+end
+
+def parse_filter_expression(expr, pivot_options, opts = {})
+
+  def unescape_string(str)
+    str.match(/^"(.*)"$/)[1].gsub(/\\"/, '"')
+  end
+
+  return nil if expr.nil?
+
+  state  = STATE_INITIAL
+  data   = []
+
+  begin
+
+    tokens = expr.match(/^
+
+          \s* (\sAND\s)         | # AND operator
+          \s* (\sOR\s)          | # OR operator
+          \s* (\w+)             | # a non-keyword word
+          \s* (\()              | # an open parenthesis
+          \s* (\))              | # a close parenthesis
+          \s* ("(\\.|[^\\"])*")   # double quoted string with backslash escapes
+
+          /ix)
+
+    if tokens.nil?
+      token = TOKEN_UNKNOWN
+    else
+      (1..NUM_TOKENS).each do |i|
+        token = i if tokens[i]
+      end
+    end
+
+    if token == TOKEN_UNKNOWN
+      token = TOKEN_EOS if expr.strip.empty?
+    end
+
+    case state | token
+      when STATE_INITIAL         | TOKEN_WORD   : state = STATE_EXPECT_OPEN     ; data << { :name => tokens[0], :expr => [] }
+      when STATE_EXPECT_OPEN     | TOKEN_OPEN   : state = STATE_EXPECT_STR
+      when STATE_EXPECT_STR      | TOKEN_STRING : state = STATE_EXPECT_EXPR_END ; data.last[:expr] << tokens[0]
+      when STATE_EXPECT_EXPR_END | TOKEN_AND    : state = STATE_EXPECT_STR      ; data.last[:expr] << :and
+      when STATE_EXPECT_EXPR_END | TOKEN_OR     : state = STATE_EXPECT_STR      ; data.last[:expr] << :or
+      when STATE_EXPECT_EXPR_END | TOKEN_CLOSE  : state = STATE_EXPECT_END
+      when STATE_EXPECT_END      | TOKEN_AND    : state = STATE_INITIAL         ; data << :and
+      when STATE_EXPECT_END      | TOKEN_OR     : state = STATE_INITIAL         ; data << :or
+      when STATE_EXPECT_END      | TOKEN_EOS    : state = STATE_COMPLETE
+
+      else raise "Error parsing query expression"
+    end
+
+    expr = tokens.post_match unless state == STATE_COMPLETE
+
+  end while state != STATE_COMPLETE
+
+  # validate and reduce expressions to current capabilities
+
+  valid_filters = pivot_options["filters"].map do |f| f["query_option"] end
+  valid_filters = valid_filters.select do |f| opts[:active_filters].include?(f) end
+
+  data.each do |category|
+    case category
+      when :or
+        raise "Unsupported query _expression_"
+      when :and
+        # Fine
+      else
+        raise "Unknown filter category" unless valid_filters.include?(category[:name])
+
+        counts = { :and => 0, :or => 0 }
+
+        category[:expr].each do |bit|
+          counts[bit] = counts[bit] + 1 if bit.class == Symbol
+        end
+
+        raise "Unsupported query _expression_" if counts[:and] > 0 && counts[:or] > 0
+
+        # haven't implemented 'and' within a particular filter yet
+        raise "Unsupported query _expression_" if counts[:and] > 0
+
+        if category[:expr].length == 1
+          category[:expr] = { :terms => [unescape_string(category[:expr].first)] }
+        else
+          category[:expr] = {
+              :operator => category[:expr][1],
+              :terms    => category[:expr].select do |t|
+                t.class == String
+              end.map do |t|
+                unescape_string(t)
+              end
+          }
+        end
+    end
+  end
+
+  data
+end
+
+def contributions_list(params = nil, user = nil, pivot_options = nil, opts = {})
+
+  def escape_sql(str)
+    str.gsub(/\\/, '\&\&').gsub(/'/, "''")
+  end
+
+  def build_url(params, opts, expr, parts, pivot_options, extra = {})
+
+    query = {}
+
+    if parts.include?(:filter)
+      bits = []
+      pivot_options["filters"].each do |filter|
+        if !opts[:lock_filter] || opts[:lock_filter][filter["query_option"]].nil?
+          if find_filter(expr, filter["query_option"])
+            bits << filter["query_option"] + "(\"" + find_filter(expr, filter["query_option"])[:expr][:terms].map do |t| t.gsub(/"/, '\"') end.join("\" OR \"") + "\")"
+          end
+        end
+      end
+
+      if bits.length > 0
+        query["filter"] = bits.join(" AND ")
+      end
+    end
+
+    query["query"]        = params[:query]        if params[:query]
+    query["order"]        = params[:order]        if parts.include?(:order)
+    query["filter_query"] = params[:filter_query] if parts.include?(:filter_query)
+
+    query.merge!(extra)
+
+    query
+  end
+
+  def comparison(lhs, rhs)
+    if rhs.length == 1
+      "#{lhs} = '#{escape_sql(rhs.first)}'"
+    else
+      "#{lhs} IN ('#{rhs.map do |bit| escape_sql(bit) end.join("', '")}')"
+    end
+  end
+
+  def create_search_results_table(search_query, opts)
+
+    begin
+      solr_results = opts[:search_models].first.multi_solr_search(search_query,
+                                                                  :models         => opts[:search_models],
+                                                                  :limit          => opts[:search_limit],
+                                                                  :results_format => :ids)
+    rescue
+      return false
+    end
+
+    conn = ActiveRecord::Base.connection
+
+    conn.execute("CREATE TEMPORARY TABLE search_results (id INT AUTO_INCREMENT UNIQUE KEY, result_type VARCHAR(255), result_id INT)")
+
+    # This next part converts the search results to SQL values
+    #
+    # from:  { "id" => "Workflow:4" }, { "id" => "Pack:6" }, ...
+    # to:    "(NULL, 'Workflow', '4'), (NULL, 'Pack', '6'), ..."
+
+    if solr_results.results.length > 0
+      insert_part = solr_results.results.map do |result|
+        "(NULL, " + result["id"].split(":").map do |bit|
+          "'#{bit}'"
+        end.join(", ") + ")"
+      end.join(", ")
+
+      conn.execute("INSERT INTO search_results VALUES #{insert_part}")
+    end
+
+    true
+  end
+
+  def drop_search_results_table
+    ActiveRecord::Base.connection.execute("DROP TABLE IF EXISTS search_results")
+  end
+
+  def column(column, opts)
+    if column == :auth_type
+      opts[:auth_type]
+    else
+      column
+    end
+  end
+
+  def calculate_filter(collection, params, filter, pivot_options, user, opts = {})
+
+    # apply all the joins and conditions except for the current filter
+
+    joins      = []
+    conditions = []
+
+    pivot_options["filters"].each do |other_filter|
+      if filter_list = find_filter(opts[:filters], other_filter["query_option"])
+        unless opts[:inhibit_other_conditions]
+          conditions << comparison(column(other_filter["id_column"], opts), filter_list[:expr][:terms]) unless other_filter == filter
+        end
+        joins += other_filter["joins"] if other_filter["joins"]
+      end
+    end
+
+    filter_id_column    = column(filter["id_column"],    opts)
+    filter_label_column = column(filter["label_column"], opts)
+
+    joins += filter["joins"] if filter["joins"]
+    conditions << "#{filter_id_column} IS NOT NULL" if filter["not_null"]
+
+    unless opts[:inhibit_filter_query]
+      if params[:filter_query]
+        conditions << "(#{filter_label_column} LIKE '%#{escape_sql(params[:filter_query])}%')"
+      end
+    end
+
+    current = find_filter(opts[:filters], filter["query_option"]) ? find_filter(opts[:filters], filter["query_option"])[:expr][:terms] : []
+
+    if opts[:ids].nil?
+      limit = 10
+    else
+      conditions << "(#{filter_id_column} IN ('#{opts[:ids].map do |id| escape_sql(id) end.join("','")}'))"
+      limit = nil
+    end
+
+    conditions = conditions.length.zero? ? nil : conditions.join(" AND ")
+
+    count_expr = "COUNT(DISTINCT #{opts[:auth_type]}, #{opts[:auth_id]})"
+
+    objects = collection.find(
+        :all,
+        :select => "#{filter_id_column} AS filter_id, #{filter_label_column} AS filter_label, #{count_expr} AS filter_count",
+        :joins => merge_joins(joins, pivot_options, collection.permission_conditions, :auth_type => opts[:auth_type], :auth_id => opts[:auth_id]),
+        :conditions => conditions,
+        :group => "#{filter_id_column}",
+        :limit => limit,
+        :order => "#{count_expr} DESC, #{filter_label_column}")
+
+    objects = objects.select do |x| !x[:filter_id].nil? end
+
+    objects = objects.map do |object|
+
+      value = object.filter_id.to_s
+      selected = current.include?(value)
+
+      label_expr = deep_clone(opts[:filters])
+      label_expr -= [find_filter(label_expr, filter["query_option"])] if find_filter(label_expr, filter["query_option"])
+
+      unless selected && current.length == 1
+        label_expr << { :name => filter["query_option"], :expr => { :terms => [value] } }
+      end
+
+      checkbox_expr = deep_clone(opts[:filters])
+
+      if expr_filter = find_filter(checkbox_expr, filter["query_option"])
+
+        if selected
+          expr_filter[:expr][:terms] -= [value]
+        else
+          expr_filter[:expr][:terms] += [value]
+        end
+
+        checkbox_expr -= [expr_filter] if expr_filter[:expr][:terms].empty?
+
+      else
+        checkbox_expr << { :name => filter["query_option"], :expr => { :terms => [value] } }
+      end
+
+      label_uri = build_url(params, opts, label_expr, [:filter, :order], pivot_options, "page" => nil)
+
+      checkbox_uri = build_url(params, opts, checkbox_expr, [:filter, :order], pivot_options, "page" => nil)
+
+      label = object.filter_label.clone
+      label = visible_name(label) if filter["visible_name"]
+      label = label.capitalize    if filter["capitalize"]
+
+      plain_label = object.filter_label
+
+      if params[:filter_query]
+        label.sub!(Regexp.new("(#{params[:filter_query]})", Regexp::IGNORECASE), '<b>\1</b>')
+      end
+
+      {
+          :object       => object,
+          :value        => value,
+          :label        => label,
+          :plain_label  => plain_label,
+          :count        => object.filter_count,
+          :checkbox_uri => checkbox_uri,
+          :label_uri    => label_uri,
+          :selected     => selected
+      }
+    end
+
+    [current, objects]
+  end
+
+  def calculate_filters(collection, params, opts, pivot_options, user)
+
+    # produce the filter list
+
+    filters = deep_clone(pivot_options["filters"])
+    cancel_filter_query_url = nil
+
+    filters.each do |filter|
+
+      # calculate the top n items of the list
+
+      filter[:current], filter[:objects] = calculate_filter(collection, params, filter, pivot_options, user, opts)
+
+      # calculate which active filters are missing (because they weren't in the
+      # top part of the list or have a count of zero)
+
+      missing_filter_ids = filter[:current] - filter[:objects].map do |ob| ob[:value] end
+
+      if missing_filter_ids.length > 0
+        filter[:objects] += calculate_filter(collection, params, filter, pivot_options, user, opts.merge(:ids => missing_filter_ids))[1]
+      end
+
+      # calculate which active filters are still missing (because they have a
+      # count of zero)
+
+      missing_filter_ids = filter[:current] - filter[:objects].map do |ob| ob[:value] end
+
+      if missing_filter_ids.length > 0
+        zero_list = calculate_filter(collection, params, filter, pivot_options, user, opts.merge(:ids => missing_filter_ids, :inhibit_other_conditions => true))[1]
+
+        zero_list.each do |x| x[:count] = 0 end
+
+        zero_list.sort! do |a, b| a[:label] <=> b[:label] end
+
+        filter[:objects] += zero_list
+      end
+    end
+
+    [filters, cancel_filter_query_url]
+  end
+
+  def find_filter(filters, name)
+    filters.find do |f|
+      f[:name] == name
+    end
+  end
+
+  def merge_joins(joins, pivot_options, permission_conditions, opts = {})
+    if joins.length.zero?
+      nil
+    else
+      joins.uniq.map do |j|
+        text = pivot_options["joins"][j].clone
+        text.gsub!(/RESULT_TYPE/,         opts[:auth_type])
+        text.gsub!(/RESULT_ID/,           opts[:auth_id])
+        text.gsub!(/VIEW_CONDITIONS/,     permission_conditions[:view_conditions])
+        text.gsub!(/DOWNLOAD_CONDITIONS/, permission_conditions[:download_conditions])
+        text.gsub!(/EDIT_CONDITIONS/,     permission_conditions[:edit_conditions])
+        text
+      end.join(" ")
+    end
+  end
+
+  pivot_options["filters"] = pivot_options["filters"].select do |f|
+    opts[:active_filters].include?(f["query_option"])
+  end
+
+  joins      = []
+  conditions = []
+
+  # parse the filter expression if provided.  convert filter expression to
+  # the old format.  this will need to be replaced eventually
+
+  opts[:filters] ||= []
+
+  include_reset_url = opts[:filters].length > 0
+
+  # filter out top level logic operators for now
+
+  opts[:filters] = opts[:filters].select do |bit|
+    bit.class == Hash
+  end
+
+  # apply locked filters
+
+  if opts[:lock_filter]
+    opts[:lock_filter].each do |filter, value|
+      opts[:filters] << { :name => filter, :expr => { :terms => [value] } }
+    end
+  end
+
+  # perform search if requested
+
+  query_problem = false
+
+  if params["query"]
+    drop_search_results_table
+    if !create_search_results_table(params["query"], opts)
+      params["query"] = nil
+      query_problem = true
+    end
+  end
+
+  if params[:query]
+    klass     = SearchResult
+    auth_type = "search_results.result_type"
+    auth_id   = "search_results.result_id"
+    group_by  = "search_results.result_type, search_results.result_id"
+  else
+    klass     = opts[:model]     || Contribution
+    auth_type = opts[:auth_type] || "contributions.contributable_type"
+    auth_id   = opts[:auth_id]   || "contributions.contributable_id"
+    group_by  = opts[:group_by]  || "contributions.contributable_type, contributions.contributable_id"
+  end
+
+  # determine joins, conditions and order for the main results
+
+  pivot_options["filters"].each do |filter|
+    if filter_list = find_filter(opts[:filters], filter["query_option"])
+      conditions << comparison(column(filter["id_column"], opts.merge( { :auth_type => auth_type, :auth_id => auth_id } )), filter_list[:expr][:terms])
+      joins += filter["joins"] if filter["joins"]
+    end
+  end
+
+  order_options = pivot_options["order"].find do |x|
+    x["option"] == params[:order]
+  end
+
+  order_options ||= pivot_options["order"].first
+
+  joins += order_options["joins"] if order_options["joins"]
+
+  having_bits = []
+
+#   pivot_options["filters"].each do |filter|
+#     if params["and_#{filter["query_option"]}"]
+#       having_bits << "GROUP_CONCAT(DISTINCT #{filter["id_column"]} ORDER BY #{filter["id_column"]}) = \"#{escape_sql(opts[:filters][filter["query_option"]])}\""
+#     end
+#   end
+
+  having_clause = ""
+
+  if having_bits.length > 0
+    having_clause = "HAVING #{having_bits.join(' AND ')}"
+  end
+
+  # perform the results query
+
+  collection = Authorization.scoped(klass,
+                                    :authorised_user => user,
+                                    :include_permissions => true,
+                                    :auth_type => auth_type,
+                                    :auth_id => auth_id)
+
+  result_options = {:joins => merge_joins(joins, pivot_options, collection.permission_conditions, :auth_type => auth_type, :auth_id => auth_id),
+                    :conditions => conditions.length.zero? ? nil : conditions.join(" AND "),
+                    :group => "#{group_by} #{having_clause}",
+                    :order => order_options["order"]}
+
+  unless opts[:no_pagination]
+    result_options[:page] = { :size => params["num"] ? params["num"].to_i : nil, :current => params["page"] }
+  end
+
+  results = collection.find(:all, result_options)
+
+  # produce a query hash to match the current filters
+
+  opts[:filter_params] = {}
+
+  pivot_options["filters"].each do |filter|
+    if params[filter["query_option"]]
+      next if opts[:lock_filter] && opts[:lock_filter][filter["query_option"]]
+      opts[:filter_params][filter["query_option"]] = params[filter["query_option"]]
+    end
+  end
+
+  # produce the filter list
+
+  opts_for_filter_query = opts.merge( { :auth_type => auth_type,
+                                        :auth_id => auth_id, :group_by => group_by } )
+
+  filters, cancel_filter_query_url = calculate_filters(collection, params, opts_for_filter_query, pivot_options, user)
+
+  # produce the summary.  If a filter query is specified, then we need to
+  # recalculate the filters without the query to get all of them.
+
+  if params[:filter_query]
+    filters2 = calculate_filters(collection, params, opts_for_filter_query.merge( { :inhibit_filter_query => true } ), pivot_options, user)[0]
+  else
+    filters2 = filters
+  end
+
+  summary = ""
+
+  filters2.select do |filter|
+
+    next if opts[:lock_filter] && opts[:lock_filter][filter["query_option"]]
+
+    selected = filter[:objects].select do |x| x[:selected] end
+    current  = selected.map do |x| x[:value] end
+
+    if selected.length > 0
+      selected_labels = selected.map do |x|
+
+        expr = deep_clone(opts[:filters])
+
+        f = find_filter(expr, filter["query_option"])
+
+        expr -= f[:expr][:terms] -= [x[:value]]
+        expr -= [f] if f[:expr][:terms].empty?
+
+        x[:plain_label] + ' <a href="' + url_for(build_url(params, opts, expr,
+                                                           [:filter, :filter_query, :order], pivot_options)) +
+            '">' + " <img src='' /></a>"
+
+      end
+
+      bits = selected_labels.map do |label| label end.join(" <i>or</i> ")
+
+      summary << '<span class="filter-in-use"><b>' + filter["title"].capitalize + "</b>: " + bits + "</span> "
+    end
+  end
+
+  if params[:filter_query]
+    cancel_filter_query_url = build_url(params, opts, opts[:filters], [:filter, :order], pivot_options)
+  end
+
+  if include_reset_url
+    reset_filters_url = build_url(params, opts, opts[:filters], [:order], pivot_options)
+  end
+
+  # remove filters that do not help in narrowing down the result set
+
+  filters = filters.select do |filter|
+    if filter[:objects].empty?
+      false
+    elsif opts[:lock_filter] && opts[:lock_filter][filter["query_option"]]
+      false
+    else
+      true
+    end
+  end
+
+  {
+      :results                 => results,
+      :filters                 => filters,
+      :reset_filters_url       => reset_filters_url,
+      :cancel_filter_query_url => cancel_filter_query_url,
+      :filter_query_url        => build_url(params, opts, opts[:filters], [:filter], pivot_options),
+      :summary                 => summary,
+      :pivot_options           => pivot_options,
+      :query_problem           => query_problem
+  }
+end
+
+def visible_name(entity)
+  name = entity.class.name
+
+  if Conf.model_aliases.value?(name)
+    Conf.model_aliases.each do |al, model|
+      name = al if name == model
+    end
+  end
+
+  name
+end
+

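For reference, a minimal sketch of how the extracted module is consumed, mirroring the call added to lib/rest.rb below; the parameter values are illustrative, current_user is the usual controller helper, and only a few of the returned keys are shown:

    require 'pivoting'

    pivot, problem = calculate_pivot(
        :pivot_options  => Conf.pivot_options,
        :params         => { "filter" => 'CATEGORY("Workflow")' },
        :user           => current_user,   # any authorised user, or nil
        :active_filters => ["CATEGORY", "TAG_ID"],
        :no_pagination  => true)

    # problem is nil unless the filter expression could not be parsed.
    # pivot is the hash built at the end of contributions_list, including:
    #   pivot[:results]  - the authorised, filtered result set
    #   pivot[:filters]  - facet lists with counts, labels and URLs
    #   pivot[:summary]  - HTML fragment describing the filters in use
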
Modified: trunk/lib/rest.rb (3414 => 3415)


--- trunk/lib/rest.rb	2013-02-11 13:57:40 UTC (rev 3414)
+++ trunk/lib/rest.rb	2013-02-11 16:00:22 UTC (rev 3415)
@@ -7,6 +7,7 @@
 require 'lib/excel_xml'
 require 'xml/libxml'
 require 'uri'
+require 'pivoting'
 
 include LibXML
 
@@ -36,7 +37,7 @@
     rules.map do |method, rules|
       next unless rules["Method"]       == "GET"
       next unless rules["Type"]         == "crud"
-      
+
       result[rules["Model Entity"]] = rules["REST Entity"]
     end
   end
@@ -2357,25 +2358,56 @@
 def get_components(opts)
   query = opts[:query]
 
-  annotations = (query['annotations'] || "").split('","').collect {|a| a.gsub('"','')} # annotations on workflow itself
+  annotations = query['annotations']  # annotations on workflow itself
   # annotations on workflow features
-  inputs = query["input"] || {}#(query["inputs"] || "").split('),')
-  outputs = query["output"] || {} #(query["outputs"] || "").split('),')
-  processors = query["processor"] || {} #(query["processors"] || "").split('),')
+  inputs = query["input"]
+  outputs = query["output"]
+  processors = query["processor"]
 
+  # Filter workflow set
+  pivot, problem = calculate_pivot(
+      :pivot_options  => Conf.pivot_options,
+      :params         => query,
+      :user           => opts[:user],
+      :search_models  => [Workflow],
+      :no_pagination  => true,
+      :locked_filters => { 'CATEGORY' => 'Workflow' },
+      :active_filters => ["CATEGORY", "TYPE_ID", "TAG_ID", "USER_ID",
+                          "LICENSE_ID", "GROUP_ID", "WSDL_ENDPOINT",
+                          "CURATION_EVENT", "SERVICE_PROVIDER",
+                          "SERVICE_COUNTRY", "SERVICE_STATUS"])
 
-  workflows = Workflow.all # This should be filtered first
+  workflow_ids = pivot[:results].map {|r| r.is_a?(SearchResult) ? r.result_id : r.contributable_id }
 
-  def get_workflow_feature_matches(workflows, features, model, query_conditions, query_conditions_excluding)
-    # "features" is an array of sets of annotations to be queried, in the form ['"<ann1>,"<ann2>"','"<ann3>"']
+  begin
+    matches = filter_by_semantic_annotations(workflow_ids, inputs, outputs, processors, annotations)
+  rescue RuntimeError => e
+    if e.message == "Bad Syntax"
+      return rest_response(400)
+    else
+      raise e
+    end
+  end
+
+  # Render
+  produce_rest_list(opts[:uri], opts[:rules], query, matches, "workflows", [], opts[:user])
+end
+
+
+private
+
+# Here be dragons!
+def filter_by_semantic_annotations(workflow_ids, inputs, outputs, processors, annotations)
+
+  # This method returns an array of workflow ids for workflows that possess all of the specified features.
+  def get_workflow_feature_matches(workflow_ids, features, model, query_conditions, query_conditions_excluding)
+    # "features" is an array of sets of annotations to be queried, in the form [ '"<ann1>","<ann2>"' , '"<ann3>"' ]
+    # Where "<ann1>" etc. is in the form "pred1 obj1", where pred1 and obj1 are the predicate and object parts of an RDF triple, respectively..
     # The above example states that the workflow must have a <feature> that has annotations "pred1 obj1" and "pred2 obj2", AND
-    # another, different <feature> with "<ann3>".
-    #
-    # The annotations are in the form "<predicate> <object>".
+    # another, different <feature> with "pred3 obj3".
 
-    # This method returns an array of arrays of workflows
     selected = []
-    features.collect do |key,set|
+    feature_matches = features.collect do |key,set|
       raise "Bad Syntax" unless set =~ /^("[^ ]+ [^"]+")(,"[^ ]+ [^"]+")*$/
 
       feature_annotations = set.split('","').collect {|a| a.gsub('"','')}
@@ -2385,58 +2417,57 @@
         predicate, object = a.split(" ", 2)
         unless selected.empty?
           model.find(:all, :include => :semantic_annotations,
-                            :conditions => [query_conditions, workflows, predicate, object, selected])
+                           :conditions => [query_conditions, workflow_ids, predicate, object, selected])
         else
           model.find(:all, :include => :semantic_annotations,
-                            :conditions => [query_conditions_excluding, workflows, predicate, object])
+                           :conditions => [query_conditions_excluding, workflow_ids, predicate, object])
         end
 
       }.inject {|f, matches| matches & f} # Get the intersection of <features> that have each annotation.
                                           #   ie. the set of <features> that have ALL the required annotations
       selected += matching_features
-      matching_features.collect {|wp| wp.workflow} # Get the workflows that those features belong to
+      matching_features.collect {|wp| wp.workflow_id} # Get the workflows that those features belong to
     end
+
+    feature_matches.inject {|matches, matches_all| matches_all & matches}
   end
 
-  begin
-    # Workflows that have the required inputs
-    matches_input_requirements = get_workflow_feature_matches(workflows, inputs, WorkflowPort,
-                                  "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'input' AND workflow_ports.id NOT IN (?)",
-                                  "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'input'")
 
-    # Workflows that have the required outputs
-    matches_output_requirements = get_workflow_feature_matches(workflows, outputs, WorkflowPort,
-                                  "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'output' AND workflow_ports.id NOT IN (?)",
-                                  "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'output'")
+  # Filter for workflows that have the required inputs
+  if inputs
+    workflow_ids = workflow_ids & get_workflow_feature_matches(workflow_ids, inputs, WorkflowPort,
+                                    "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'input' AND workflow_ports.id NOT IN (?)",
+                                    "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'input'")
+  end
 
-    # Workflows that have the required processors
-    matches_processor_requirements = get_workflow_feature_matches(workflows, processors, WorkflowProcessor,
-                                  "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND workflow_processors.id NOT IN (?)",
-                                  "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ?")
+  # Filter for workflows that have the required outputs
+  if outputs
+    workflow_ids = workflow_ids & get_workflow_feature_matches(workflow_ids, outputs, WorkflowPort,
+                                    "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'output' AND workflow_ports.id NOT IN (?)",
+                                    "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND port_type = 'output'")
+  end
 
-    # Workflows that have the required semantic annotations
-    matches_semantic_annotation_requirements = annotations.collect do |p|
-      [Workflow.find(:all, :include => :semantic_annotations,
-                     :conditions => {:id => workflows,
-                                     :semantic_annotations => {:value => p}})]
-    end
-  rescue RuntimeError => e
-    if e.message == "Bad Syntax"
-      return rest_response(400)
-    else
-      raise e
-    end
+  # Filter for workflows that have the required processors
+  if processors
+    workflow_ids = workflow_ids & get_workflow_feature_matches(workflow_ids, processors, WorkflowProcessor,
+                                    "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ? AND workflow_processors.id NOT IN (?)",
+                                    "workflow_id IN (?) AND semantic_annotations.predicate = ? AND semantic_annotations.object = ?")
   end
 
-  # Create an array containing arrays of each set of matches
-  matches = matches_input_requirements +
-            matches_output_requirements +
-            matches_processor_requirements +
-            matches_semantic_annotation_requirements
+  # Filter for workflows that have the required semantic annotations
+  unless annotations.blank?
+    raise "Bad Syntax" unless annotations =~ /^("[^ ]+ [^"]+")(,"[^ ]+ [^"]+")*$/
 
+    annotations = annotations.split('","').collect {|a| a.gsub('"','')}
+
+    matches_semantic_annotation_requirements = annotations.collect { |a|
+      predicate, object = a.split(" ", 2)
+      SemanticAnnotation.find_all_by_predicate_and_object_and_subject_type(predicate, object, "Workflow").map {|a| a.subject_id}
+    }
+
+    workflow_ids = workflow_ids & matches_semantic_annotation_requirements.inject {|matches, matches_all| matches_all & matches}
+  end
+
   # Workflows that match ALL the requirements - the intersection of all the sub arrays.
-  matches = matches.inject {|matches, matches_all| matches_all & matches}
-
-  # Render
-  produce_rest_list(opts[:uri], opts[:rules], query, matches, "workflows", [], opts[:user])
+  Workflow.find_all_by_id(workflow_ids)
 end

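For context, a sketch of the feature-filter syntax that get_components and filter_by_semantic_annotations expect. Each input/output/processor entry is a comma-separated list of quoted "predicate object" pairs (the regular expression above enforces this), all of which must be carried by one and the same port or processor; separate entries describe distinct ports or processors, and query["annotations"] uses the same quoting for annotations on the workflow itself. The query hash below is illustrative; the predicate/object URIs are taken from the image_to_tiff_migration.t2flow fixture added further down:

    # Hypothetical query, as get_components would receive it:
    query = {
      # two distinct input ports, each with one required semantic annotation
      "input" => {
        "0" => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"',
        "1" => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#ToURIPort"'
      },
      # a processor with a required dependency annotation
      "processor" => {
        "0" => '"http://scape-project.eu/pc/vocab/profiles#hasDependency http://scape-project.eu/pc/vocab/profiles#imagemagick-image2tiff"'
      }
    }
    # Anything that fails the syntax check raises "Bad Syntax", which
    # get_components turns into an HTTP 400 response.
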
Modified: trunk/test/fixtures/content_blobs.yml (3414 => 3415)


--- trunk/test/fixtures/content_blobs.yml	2013-02-11 13:57:40 UTC (rev 3414)
+++ trunk/test/fixtures/content_blobs.yml	2013-02-11 16:00:22 UTC (rev 3415)
@@ -24,3 +24,6 @@
   id: 4
   data: <%= load_blob_data('workflow_branch_choice.xml') %>
 
+component_workflow_blob:
+  id: 5
+  data: <%= load_blob_data('image_to_tiff_migration.t2flow') %>
\ No newline at end of file

Modified: trunk/test/fixtures/contributions.yml (3414 => 3415)


--- trunk/test/fixtures/contributions.yml	2013-02-11 13:57:40 UTC (rev 3414)
+++ trunk/test/fixtures/contributions.yml	2013-02-11 16:00:22 UTC (rev 3415)
@@ -131,4 +131,27 @@
   downloads_count: 1
   viewings_count: 1
 
+contribution_for_component_workflow:
+  id: 12
+  contributor_id: 1
+  contributor_type: User
+  contributable_id: 3
+  contributable_type: Workflow
+  policy_id: 9
+  created_at: 2008-08-07 23:53:46
+  updated_at: 2008-08-07 23:53:46
+  downloads_count: 1
+  viewings_count: 1
 
+contribution_for_component_workflow2:
+  id: 13
+  contributor_id: 2
+  contributor_type: User
+  contributable_id: 4
+  contributable_type: Workflow
+  policy_id: 10
+  created_at: 2008-08-07 23:53:46
+  updated_at: 2008-08-07 23:53:46
+  downloads_count: 1
+  viewings_count: 1
+

Added: trunk/test/fixtures/files/image_to_tiff_migration.t2flow (0 => 3415)


--- trunk/test/fixtures/files/image_to_tiff_migration.t2flow	                        (rev 0)
+++ trunk/test/fixtures/files/image_to_tiff_migration.t2flow	2013-02-11 16:00:22 UTC (rev 3415)
@@ -0,0 +1,278 @@
+<workflow xmlns="http://taverna.sf.net/2008/xml/t2flow" version="1" producedBy="taverna-2.4.0"><dataflow id="16841f82-e1fe-4527-b7ef-411c4c59493b" role="top"><name>Image_to_tiff_migrat</name><inputPorts><port><name>from_uri</name><depth>0</depth><granularDepth>0</granularDepth><annotations><annotation_chain encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.FreeTextDescription">
+        <text>description</text>
+      </annotationBean>
+      <date>2012-11-16 16:22:56.977 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain><annotation_chain encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.ExampleValue">
+        <text>example</text>
+      </annotationBean>
+      <date>2012-11-16 16:22:43.712 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.SemanticAnnotation">
+        <mimeType>text/rdf+n3</mimeType>
+        <content>[]    &lt;http://scape-project.eu/pc/vocab/profiles#hasPortType&gt;
+              &lt;http://scape-project.eu/pc/vocab/profiles#FromURIPort&gt; .
+</content>
+      </annotationBean>
+      <date>2012-11-19 15:53:49.250 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2></annotations></port><port><name>to_uri</name><depth>0</depth><granularDepth>0</granularDepth><annotations><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.SemanticAnnotation">
+        <mimeType>text/rdf+n3</mimeType>
+        <content>[]    &lt;http://scape-project.eu/pc/vocab/profiles#hasPortType&gt;
+              &lt;http://scape-project.eu/pc/vocab/profiles#ToURIPort&gt; .
+</content>
+      </annotationBean>
+      <date>2012-11-20 12:09:41.928 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2></annotations></port></inputPorts><outputPorts /><processors><processor><name>Tool</name><inputPorts><port><name>to_uri</name><depth>0</depth></port><port><name>from_uri</name><depth>0</depth></port></inputPorts><outputPorts /><annotations><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.SemanticAnnotation">
+        <mimeType>text/rdf+n3</mimeType>
+        <content>[]    &lt;http://scape-project.eu/pc/vocab/profiles#hasDependency&gt;
+              &lt;http://scape-project.eu/pc/vocab/profiles#imagemagick-image2tiff&gt; .
+</content>
+      </annotationBean>
+      <date>2012-11-20 12:17:37.420 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2></annotations><activities><activity><raven><group>net.sf.taverna.t2.activities</group><artifact>external-tool-activity</artifact><version>1.4</version></raven><class>net.sf.taverna.t2.activities.externaltool.ExternalToolActivity</class><inputMap><map from="to_uri" to="to_uri" /><map from="from_uri" to="from_uri" /></inputMap><outputMap /><configBean encoding="xstream"><net.sf.taverna.t2.activities.externaltool.ExternalToolActivityConfigurationBean xmlns="">
+  <mechanismType>789663B8-DA91-428A-9F7D-B3F3DA185FD4</mechanismType>
+  <mechanismName>default local</mechanismName>
+  <mechanismXML>&lt;?xml version="1.0" encoding="UTF-8"?&gt;&#xD;
+&lt;localInvocation&gt;&lt;shellPrefix&gt;/bin/sh -c&lt;/shellPrefix&gt;&lt;linkCommand&gt;/bin/ln -s %%PATH_TO_ORIGINAL%% %%TARGET_NAME%%&lt;/linkCommand&gt;&lt;/localInvocation&gt;&#xD;
+</mechanismXML>
+  <externaltoolid>1c6b3cec-667d-4364-8a77-315a3b3c71f8</externaltoolid>
+  <useCaseDescription>
+    <usecaseid />
+    <description />
+    <command>convert %%from_uri%% tiff:%%to_uri%%</command>
+    <preparingTimeoutInSeconds>1200</preparingTimeoutInSeconds>
+    <executionTimeoutInSeconds>1800</executionTimeoutInSeconds>
+    <tags>
+      <string>from_uri</string>
+      <string>to_uri</string>
+    </tags>
+    <REs />
+    <queue__preferred />
+    <queue__deny />
+    <static__inputs />
+    <inputs>
+      <entry>
+        <string>to_uri</string>
+        <de.uni__luebeck.inb.knowarc.usecases.ScriptInputUser>
+          <tag>to_uri</tag>
+          <file>false</file>
+          <tempFile>false</tempFile>
+          <binary>false</binary>
+          <charsetName>MacRoman</charsetName>
+          <forceCopy>false</forceCopy>
+          <list>false</list>
+          <concatenate>false</concatenate>
+          <mime />
+        </de.uni__luebeck.inb.knowarc.usecases.ScriptInputUser>
+      </entry>
+      <entry>
+        <string>from_uri</string>
+        <de.uni__luebeck.inb.knowarc.usecases.ScriptInputUser>
+          <tag>from_uri</tag>
+          <file>false</file>
+          <tempFile>false</tempFile>
+          <binary>false</binary>
+          <charsetName>MacRoman</charsetName>
+          <forceCopy>false</forceCopy>
+          <list>false</list>
+          <concatenate>false</concatenate>
+          <mime />
+        </de.uni__luebeck.inb.knowarc.usecases.ScriptInputUser>
+      </entry>
+    </inputs>
+    <outputs />
+    <includeStdIn>false</includeStdIn>
+    <includeStdOut>true</includeStdOut>
+    <includeStdErr>true</includeStdErr>
+    <validReturnCodes>
+      <int>0</int>
+    </validReturnCodes>
+  </useCaseDescription>
+  <edited>false</edited>
+</net.sf.taverna.t2.activities.externaltool.ExternalToolActivityConfigurationBean></configBean><annotations /></activity></activities><dispatchStack><dispatchLayer><raven><group>net.sf.taverna.t2.core</group><artifact>workflowmodel-impl</artifact><version>1.4</version></raven><class>net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.Parallelize</class><configBean encoding="xstream"><net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.ParallelizeConfig xmlns="">
+  <maxJobs>1</maxJobs>
+</net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.ParallelizeConfig></configBean></dispatchLayer><dispatchLayer><raven><group>net.sf.taverna.t2.core</group><artifact>workflowmodel-impl</artifact><version>1.4</version></raven><class>net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.ErrorBounce</class><configBean encoding="xstream"><null xmlns="" /></configBean></dispatchLayer><dispatchLayer><raven><group>net.sf.taverna.t2.core</group><artifact>workflowmodel-impl</artifact><version>1.4</version></raven><class>net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.Failover</class><configBean encoding="xstream"><null xmlns="" /></configBean></dispatchLayer><dispatchLayer><raven><group>net.sf.taverna.t2.core</group><artifact>workflowmodel-impl</artifact><version>1.4</version></raven><class>net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.Retry</class><configBean encoding="xstream"><net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.RetryConfig xmlns="">
+  <backoffFactor>1.0</backoffFactor>
+  <initialDelay>1000</initialDelay>
+  <maxDelay>5000</maxDelay>
+  <maxRetries>0</maxRetries>
+</net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.RetryConfig></configBean></dispatchLayer><dispatchLayer><raven><group>net.sf.taverna.t2.core</group><artifact>workflowmodel-impl</artifact><version>1.4</version></raven><class>net.sf.taverna.t2.workflowmodel.processor.dispatch.layers.Invoke</class><configBean encoding="xstream"><null xmlns="" /></configBean></dispatchLayer></dispatchStack><iterationStrategyStack><iteration><strategy><cross><port name="to_uri" depth="0" /><port name="from_uri" depth="0" /></cross></strategy></iteration></iterationStrategyStack></processor></processors><conditions /><datalinks><datalink><sink type="processor"><processor>Tool</processor><port>to_uri</port></sink><source type="dataflow"><port>to_uri</port></source></datalink><datalink><sink type="processor"><processor>Tool</processor><port>from_uri</port></sink><source type="dataflow"><port>from_uri</port></source></datalink></datalinks><annotations><annotation_chain encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.FreeTextDescription">
+        <text>SCAPE Migration Component that converts any ImageMagick supported image format to TIFF</text>
+      </annotationBean>
+      <date>2012-11-15 13:07:59.763 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>fd401dac-c521-4d94-8014-8efeba355d7a</identification>
+      </annotationBean>
+      <date>2012-11-19 15:55:14.145 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>22bc577f-9a74-4981-8725-c3b4ac28d88b</identification>
+      </annotationBean>
+      <date>2012-11-20 12:10:07.110 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>052219d1-09ed-4c6b-a5a4-2bad004a10ed</identification>
+      </annotationBean>
+      <date>2012-11-19 16:17:25.295 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>5abab48b-a3f0-4f14-9894-e824a2fa5966</identification>
+      </annotationBean>
+      <date>2012-11-16 16:39:00.691 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.Author">
+        <text>David Withers</text>
+      </annotationBean>
+      <date>2012-11-15 12:25:42.359 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>b5361a0e-6abd-4070-8655-5dd7b4237576</identification>
+      </annotationBean>
+      <date>2012-11-15 13:36:38.115 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>2bec9695-c132-4728-988c-c3a03ddcc78d</identification>
+      </annotationBean>
+      <date>2012-11-15 13:08:31.374 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>0d60174b-0d75-481d-8b4d-194c6109bc96</identification>
+      </annotationBean>
+      <date>2012-11-20 12:17:54.280 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.SemanticAnnotation">
+        <mimeType>text/rdf+n3</mimeType>
+        <content>[]    &lt;http://scape-project.eu/pc/vocab/profiles#hasMigrationPath&gt;
+              &lt;http://scape-project.eu/pc/vocab/profiles#jpegToTiff&gt; .
+</content>
+      </annotationBean>
+      <date>2012-11-20 12:17:49.904 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>16841f82-e1fe-4527-b7ef-411c4c59493b</identification>
+      </annotationBean>
+      <date>2012-11-21 11:00:56.248 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain_2_2 encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.IdentificationAssertion">
+        <identification>a5aee4f0-01d8-4ae3-96f0-c992b8d40af2</identification>
+      </annotationBean>
+      <date>2012-11-15 13:14:14.243 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain_2_2><annotation_chain encoding="xstream"><net.sf.taverna.t2.annotation.AnnotationChainImpl xmlns="">
+  <annotationAssertions>
+    <net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+      <annotationBean class="net.sf.taverna.t2.annotation.annotationbeans.DescriptiveTitle">
+        <text>Image to tiff migration action</text>
+      </annotationBean>
+      <date>2012-11-21 11:00:54.84 UTC</date>
+      <creators />
+      <curationEventList />
+    </net.sf.taverna.t2.annotation.AnnotationAssertionImpl>
+  </annotationAssertions>
+</net.sf.taverna.t2.annotation.AnnotationChainImpl></annotation_chain></annotations></dataflow></workflow>
\ No newline at end of file
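
The SemanticAnnotation beans embedded in the t2flow above each hold a single N3 triple of the form [] <predicate> <object> . (XML-escaped inside the workflow file). As a rough sketch only, not the extraction code used by this revision, a predicate/object pair could be recovered from such content like this:

# Illustrative sketch, not code from this revision: pull the predicate and
# object out of a single-triple N3 annotation such as the hasPortType /
# FromURIPort content above, after XML-unescaping it.
def extract_predicate_and_object(n3_content)
  match = n3_content.match(/\[\]\s+<([^>]+)>\s+<([^>]+)>\s*\./)
  match && { :predicate => match[1], :object => match[2] }
end

n3 = '[] <http://scape-project.eu/pc/vocab/profiles#hasPortType> ' \
     '<http://scape-project.eu/pc/vocab/profiles#FromURIPort> .'
extract_predicate_and_object(n3)
# => {:predicate=>"http://scape-project.eu/pc/vocab/profiles#hasPortType",
#     :object=>"http://scape-project.eu/pc/vocab/profiles#FromURIPort"}

The semantic_annotations.yml fixture added below stores the same predicate/object pairs in relational form against their Workflow, WorkflowProcessor and WorkflowPort subjects.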

Modified: trunk/test/fixtures/policies.yml (3414 => 3415)


--- trunk/test/fixtures/policies.yml	2013-02-11 13:57:40 UTC (rev 3414)
+++ trunk/test/fixtures/policies.yml	2013-02-11 16:00:22 UTC (rev 3415)
@@ -77,4 +77,24 @@
   share_mode: 0
   update_mode: 0
   created_at: 2007-10-22 18:54:22
+  updated_at: 2008-01-09 12:12:12
+
+component_workflow_policy:
+  id: 9
+  contributor_id: 1
+  contributor_type: User
+  name: auto
+  share_mode: 0
+  update_mode: 0
+  created_at: 2007-10-22 18:54:22
+  updated_at: 2008-01-09 12:12:12
+
+component_workflow_policy2:
+  id: 10
+  contributor_id: 2
+  contributor_type: User
+  name: auto
+  share_mode: 0
+  update_mode: 0
+  created_at: 2007-10-22 18:54:22
   updated_at: 2008-01-09 12:12:12
\ No newline at end of file

Added: trunk/test/fixtures/semantic_annotations.yml (0 => 3415)


--- trunk/test/fixtures/semantic_annotations.yml	                        (rev 0)
+++ trunk/test/fixtures/semantic_annotations.yml	2013-02-11 16:00:22 UTC (rev 3415)
@@ -0,0 +1,36 @@
+has_migration_path:
+  subject_id: 3
+  subject_type: Workflow
+  predicate: http://scape-project.eu/pc/vocab/profiles#hasMigrationPath
+  object: http://scape-project.eu/pc/vocab/profiles#jpegToTiff
+
+has_dependency:
+  subject_id: 1
+  subject_type: WorkflowProcessor
+  predicate: http://scape-project.eu/pc/vocab/profiles#hasDependency
+  object: http://scape-project.eu/pc/vocab/profiles#imagemagick-image2tiff
+
+has_port_type_from:
+  subject_id: 1
+  subject_type: WorkflowPort
+  predicate: http://scape-project.eu/pc/vocab/profiles#hasPortType
+  object: http://scape-project.eu/pc/vocab/profiles#FromURIPort
+
+has_port_type_to:
+  subject_id: 2
+  subject_type: WorkflowPort
+  predicate: http://scape-project.eu/pc/vocab/profiles#hasPortType
+  object: http://scape-project.eu/pc/vocab/profiles#ToURIPort
+
+has_port_type_from2:
+  subject_id: 3
+  subject_type: WorkflowPort
+  predicate: http://scape-project.eu/pc/vocab/profiles#hasPortType
+  object: http://scape-project.eu/pc/vocab/profiles#FromURIPort
+
+second_port_annotation:
+  subject_id: 3
+  subject_type: WorkflowPort
+  predicate: http://scape-project.eu/pc/vocab/profiles#hasFish
+  object: http://scape-project.eu/pc/vocab/profiles#CornishSardine
+
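
Each fixture row above records one predicate/object pair against a subject (a Workflow, a WorkflowProcessor or a WorkflowPort), which is the shape the component-query tests below rely on. As a minimal sketch (the helper name is hypothetical; this is not the controller's actual filtering code), a quoted "<predicate> <object>" query term could be matched against in-memory rows of that shape like so:

# Hypothetical helper, for illustration only: match one quoted
# "<predicate> <object>" query term (as sent by the tests below) against
# rows shaped like the fixtures above. Not the actual filtering code.
def subjects_matching(term, annotations)
  predicate, object = term.delete('"').split(/\s+/, 2)
  matches = annotations.select { |a| a[:predicate] == predicate && a[:object] == object }
  matches.map { |a| [a[:subject_type], a[:subject_id]] }
end

rows = [{ :subject_type => 'WorkflowPort', :subject_id => 1,
          :predicate => 'http://scape-project.eu/pc/vocab/profiles#hasPortType',
          :object    => 'http://scape-project.eu/pc/vocab/profiles#FromURIPort' }]

subjects_matching('"http://scape-project.eu/pc/vocab/profiles#hasPortType ' \
                  'http://scape-project.eu/pc/vocab/profiles#FromURIPort"', rows)
# => [["WorkflowPort", 1]]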

Copied: trunk/test/fixtures/workflow_ports.yml (from rev 3401, trunk/test/fixtures/workflows.yml) (0 => 3415)


--- trunk/test/fixtures/workflow_ports.yml	                        (rev 0)
+++ trunk/test/fixtures/workflow_ports.yml	2013-02-11 16:00:22 UTC (rev 3415)
@@ -0,0 +1,18 @@
+# Read about fixtures at http://ar.rubyonrails.org/classes/Fixtures.html
+from_uri_port:
+  id: 1
+  name: from_uri
+  port_type: input
+  workflow_id: 3
+
+to_uri_port:
+  id: 2
+  name: to_uri
+  port_type: input
+  workflow_id: 3
+
+single_port:
+  id: 3
+  name: from_uri
+  port_type: input
+  workflow_id: 4
\ No newline at end of file

Added: trunk/test/fixtures/workflow_processors.yml (0 => 3415)


--- trunk/test/fixtures/workflow_processors.yml	                        (rev 0)
+++ trunk/test/fixtures/workflow_processors.yml	2013-02-11 16:00:22 UTC (rev 3415)
@@ -0,0 +1,5 @@
+# Read about fixtures at http://ar.rubyonrails.org/classes/Fixtures.html
+migration_tool:
+  id: 1
+  name: Tool
+  workflow_id: 3

Modified: trunk/test/fixtures/workflows.yml (3414 => 3415)


--- trunk/test/fixtures/workflows.yml	2013-02-11 13:57:40 UTC (rev 3414)
+++ trunk/test/fixtures/workflows.yml	2013-02-11 16:00:22 UTC (rev 3415)
@@ -26,3 +26,32 @@
   current_version: 1
   content_type_id: 3
   content_blob_id: 4
+
+component_workflow:
+  id: 3
+  contributor_id: 1
+  contributor_type: User
+  title: Image to TIFF Migration Component
+  unique_name: image_to_tiff
+  body: Component thing with annotations
+  created_at: 2008-04-11 22:19:33
+  updated_at: 2008-05-14 06:45:08
+  license_id: 2
+  current_version: 1
+  content_type_id: 4
+  content_blob_id: 5
+
+component_workflow2:
+  id: 4
+  contributor_id: 2
+  contributor_type: User
+  title: Another Component
+  unique_name: component_workflow
+  body: Second component thing with annotations
+  created_at: 2008-04-11 22:19:33
+  updated_at: 2008-05-14 06:45:08
+  license_id: 2
+  current_version: 1
+  content_type_id: 4
+  content_blob_id: 5
+

Modified: trunk/test/functional/api_controller_test.rb (3414 => 3415)


--- trunk/test/functional/api_controller_test.rb	2013-02-11 13:57:40 UTC (rev 3414)
+++ trunk/test/functional/api_controller_test.rb	2013-02-11 16:00:22 UTC (rev 3415)
@@ -1014,6 +1014,74 @@
     assert_response(:not_found)
   end
 
+  # component querying
+
+  def test_basic_component_query
+    login_as(:john)
+
+    resp = rest_request(:get, 'components', nil, {"input" => {"0" => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"'}})
+
+    assert_response(:success)
+    assert_equal 2, resp.find('//workflow').size
+    assert_equal [3,4], resp.find('//workflow').map {|w| w['uri'].split('?id=').last.to_i}
+  end
+
+  def test_two_annotation_component_query
+    login_as(:john)
+
+    anns = '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"' +
+          ',"http://scape-project.eu/pc/vocab/profiles#hasFish http://scape-project.eu/pc/vocab/profiles#CornishSardine"'
+    resp = rest_request(:get, 'components', nil, {"input" => {"0" => anns}})
+
+    assert_response(:success)
+    assert_equal 1, resp.find('//workflow').size
+    assert_equal 4, resp.find('//workflow').first['uri'].split('?id=').last.to_i
+  end
+
+  def test_two_feature_component_query
+    login_as(:john)
+
+    resp = rest_request(:get, 'components', nil, {'input' => {'0' => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"',
+                                                              '1' => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#ToURIPort"'}
+                                                 })
+
+    assert_response(:success)
+    assert_equal 1, resp.find('//workflow').size
+    assert_equal 3, resp.find('//workflow').first['uri'].split('?id=').last.to_i
+  end
+
+  def test_verbose_component_query
+    login_as(:john)
+
+    resp = rest_request(:get, 'components', nil, {'input' => {'0' => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"',
+                                                              '1' => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#ToURIPort"'},
+                                                  'processor' => {'0' => '"http://scape-project.eu/pc/vocab/profiles#hasDependency http://scape-project.eu/pc/vocab/profiles#imagemagick-image2tiff"'},
+                                                  'annotations' => '"http://scape-project.eu/pc/vocab/profiles#hasMigrationPath http://scape-project.eu/pc/vocab/profiles#jpegToTiff"'
+                                                 })
+
+    assert_response(:success)
+    assert_equal 1, resp.find('//workflow').size
+    assert_equal 3, resp.find('//workflow').first['uri'].split('?id=').last.to_i
+  end
+
+  def test_component_query_with_filters
+    login_as(:john)
+
+    resp = rest_request(:get, 'components', nil, {"input" => {"0" => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"'},
+                                                  "filter" => 'USER_ID("2")'})
+
+    assert_response(:success)
+    assert_equal 1, resp.find('//workflow').size
+    assert_equal 4, resp.find('//workflow').last['uri'].split('?id=').last.to_i
+
+    resp = rest_request(:get, 'components', nil, {"input" => {"0" => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"'},
+                                                  "filter" => 'USER_ID("1")'})
+
+    assert_response(:success)
+    assert_equal 1, resp.find('//workflow').size
+    assert_equal 3, resp.find('//workflow').last['uri'].split('?id=').last.to_i
+  end
+
   private
 
   def rest_request(method, uri, data = "", query = {})
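
The new tests above query the components resource with nested input[n] and processor[n] terms, a workflow-level annotations term, and a filter expression (e.g. USER_ID("2")). For orientation only, an equivalent client request might be built roughly as follows; the host, resource path and credentials here are placeholders and should be checked against the instance's actual API routes:

# Rough client-side equivalent of test_verbose_component_query above.
# The host, resource path and credentials are placeholders, not confirmed
# endpoints or accounts of any particular myExperiment instance.
require 'net/http'
require 'uri'

params = {
  'input[0]'     => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#FromURIPort"',
  'input[1]'     => '"http://scape-project.eu/pc/vocab/profiles#hasPortType http://scape-project.eu/pc/vocab/profiles#ToURIPort"',
  'processor[0]' => '"http://scape-project.eu/pc/vocab/profiles#hasDependency http://scape-project.eu/pc/vocab/profiles#imagemagick-image2tiff"',
  'annotations'  => '"http://scape-project.eu/pc/vocab/profiles#hasMigrationPath http://scape-project.eu/pc/vocab/profiles#jpegToTiff"'
}

uri = URI('http://myexperiment.example.org/components.xml')  # placeholder host and path
uri.query = URI.encode_www_form(params)

request = Net::HTTP::Get.new(uri.request_uri)
request.basic_auth('username', 'password')                   # placeholder credentials

response = Net::HTTP.start(uri.host, uri.port) { |http| http.request(request) }
puts response.body  # expected: an XML listing of the matching workflow elements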
