Can't do OO in javascript. But you can do AJAX with HTML forms.



  •   [Outer Frame]
     
        function BuildStringDocs() {
            var i, iInv1,iInv2, iAux;
            var sReturn='';
            var rg = new RegExp('%u20AC','ig');
            for (i=0;i<top.aDocs[0].length;i++){
                if (top.aDocs[0][i]!=undefined){
                    sReturn = sReturn + '&inv' + k + '=CreateDoc';
                    iInv1 = k;
                    k = k + 1;
                    sReturn = sReturn + '&inv' + k + '=registerDoc';
                    sReturn = sReturn + '&inv' + k + '.document.name.id=@inv' + iInv1;
                    sReturn = sReturn + '&inv' + k + '.document.name.str=' + (escape(top.aDocs[1][i])).replace(rg,'€');
                    sReturn = sReturn + '&inv' + k + '.document.accesspath=' + top.aDocs[3][i];
                    sReturn = sReturn + '&inv' + k + '.document.creator.id=' + "<%= mysession.Context.ActorId %>";
                    sReturn = sReturn + '&inv' + k + '.document.docclass=' + top.aDocs[2][i];
                    k = k + 1;
                    sReturn = sReturn + '&inv' + k + '=attachDoc';
                    sReturn = sReturn + '&inv' + k + '.caseName.id=' + "<%= CurrentTask.WorkCase.Id %>";
                    sReturn = sReturn + '&inv' + k + '.documentName.id=@inv' + iInv1;
                    k = k + 1;
                }
            }
            return sReturn;
        }

        var k=0;
        top.aDocs = new Array();
        top.aDocs[0] = new Array(); // rowid
        top.aDocs[1] = new Array(); // inv2.document.name.str
        top.aDocs[2] = new Array(); // inv2.document.docClass
        top.aDocs[3] = new Array(); // inv2.document.accessPath
        top.aDocs[4] = new Array(); // docid

        top.aDocs[5] = new Array(); // @SAP_ARCHIV_ID.value
        top.aDocs[6] = new Array(); // @SAP_ARC_DOC_ID.value
        top.aDocs[7] = new Array(); // @SAP_AR_OBJECT.value

        top.nDocs=0;
     
     
      [Inner Frame]
     
          function removeRow(nDocRow){
            var i=0;
            var rowid = 'Document' + nDocRow;
            var orow = TBODYAttach.rows.namedItem(rowid);
            var rowindex = orow.sectionRowIndex;
            TBODYAttach.deleteRow(rowindex);
            
            for (i=0 ; i<top.aDocs[0].length ; i++){
                if (top.aDocs[0][i]==parseInt(nDocRow)){
                    // *** Deletes the values from the array
                    top.aDocs[0].splice(i,1);
                    top.aDocs[1].splice(i,1);
                    top.aDocs[2].splice(i,1);
                    top.aDocs[3].splice(i,1);
                    top.aDocs[4].splice(i,1);
                }    
            }        
        }
        
        
        function AddRowtoTable(sDocname, sAccessPath){
            var uploadForm    = document.uploadForm;
            var dataForm      = document.attachFormUploadedDone;
            var attachUrlForm = document.attachURLForm;
          
            if (!sDocname){
                sDocname = uploadForm['DocName'].value;
            }
               
            if (!sAccessPath){
                    sAccessPath = sURL.value;
            }

            var i,nDocs;
            var tbody = document.getElementById('TBODYAttach');  
            var row = document.createElement("TR");
            var td1 = document.createElement("TD");
            var td2 = document.createElement("TD");
            var td3 = document.createElement("TD");
            var td4 = document.createElement("TD");
            var td5 = document.createElement("TD");

            // *** Inserts values in the array
            nDocs = top.nDocs;
            top.aDocs[0][nDocs] = nDocs;
            top.aDocs[1][nDocs] = document.attachFormUploadedDone.FULLNAME.value; // inv2.document.name.str
            top.aDocs[2][nDocs] = document.attachFormUploadedDone.DocClass.value; // inv2.document.docClass
            top.aDocs[3][nDocs] = sAccessPath; // inv2.document.accessPath
            top.aDocs[4][nDocs] = "<%=docum.Name.Id%>";

            row.id = 'Document' + nDocs;


            // o o o
            
            
            td2.ondblclick  = Function("openAppinIE('" + top.aDocs[3][nDocs] + "','" + top.aDocs[1][nDocs] + "');");
            
            
            // o o o
            
            td5.align = "center";
            td5.ondblclick  = Function("checkout('" + top.aDocs[3][i] + "');");
            
            var input2 = document.createElement("IMG");
            input2.src = "/Images/msoffice.gif";
            input2.border = "0";  
            input2.alt = "Check-Out document"
            
            td5.appendChild(input2);
            
            row.appendChild(td1);
            row.appendChild(td2);
            row.appendChild(td3);
            row.appendChild(td4);
            row.appendChild(td5);
            tbody.appendChild(row);
            
            row.click();
            
            top.nDocs = nDocs+1;
            
            document.attachFormUploadedDone.FULLNAME.value = '';
            document.attachFormUploadedDone.DocClass.value = '';


            top.hasComment(false,"<%=getDictionary("attach.docName")%>");
            top.hasComment(false,"<%=getDictionary("attach.docClass")%>");
            top.hasComment(false,"<%=getDictionary("attach.fileToUp")%>");


            document.attachFormUploadedDone.reset();
        }

     

    I guess  top.aDocs[1][i]  is much more efficient than  top.aDocs[i].docname  or  top.aDocs[i]['docname']  ...

     

    But what's this all about?

     

    Well, when the uploadForm form is submitted in Inner Frame, it loads a 3rd ASP inside a hidden frame in Inner Frame. The 3rd ASP then, in its onload, sets the value of the sURL input field in Inner Frame.

    The 3rd ASP then submits (via javascript) the attachFormUploadedDone form in Inner Frame, and a 4th ASP loads in hidden frame #2. The 4th ASP calls a javascript function in Inner Frame to signal the end of the upload. It is then that AddRowtoTable is called (no value is ever passed in the sAccessPath parameter; its value is always fetched from the sURL input field).

    It's AJAX without the X !
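
    For anyone who hasn't seen the trick, a minimal sketch of the hidden-iframe round trip looks something like this (the iframe name, the handleUploadDone callback and the inline ASP are my own illustration, not the actual app's code):

        // In Inner Frame's markup (sketch):
        //   <iframe name="hiddenFrame" style="display:none"></iframe>
        //   <form name="uploadForm" target="hiddenFrame" method="post"
        //         enctype="multipart/form-data" action="upload.asp"> ... </form>

        // Callback that the page rendered into hiddenFrame calls back into Inner Frame:
        function handleUploadDone(sSavedPath) {
            sURL.value = sSavedPath;   // the real flow stashes the path in the sURL field
            AddRowtoTable();           // which then reads it back out of that field
        }

        // The server's response, rendered inside hiddenFrame, ends with something like:
        //   <script>parent.handleUploadDone('<%= savedPath %>');</script>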

     

    aDocs is stored in Outer Frame because Inner Frame may be unloaded ([?] not sure) if the user decides to "change tabs" in Outer Frame.



    The BuildStringDocs() function is used to build part of the URL query string that is passed to a controller when you navigate away from this mess (into another mess) by submitting a form in Outer Frame.

     

    PS: actually, the 3rd and 4th ASPs aren't loaded directly, but rather as redirects from two different "pages" that do a two-step upload. And a 5th ASP that deletes temporary files is also called via a javascript form submission - from the 4th ASP, I think.
     



  • @Zecc said:

    Can't do OO in javascript.

    Huh?

    @Zecc said:

    It's AJAX without the X !

    So?  People have been doing iframe-based "AJAX without the X" since before the name "AJAX" was even invented.  The iframe solution was pretty much the best option available during the IE 5 days (circa 2000), since XMLHttpRequest wouldn't be widely available 'til 2002-ish.



  • Yeah, what that other guy said. 

    JS can be OO--just check out Doug Crockford's work on it--but it's just not the kind you're expecting.  See PrototypeJS or a few other items for more on that, as well.

    In some ways, I like the iFrame-based stuff better than AJAX--if only because that nutty XMLHttpRequest object is not as much fun as you'd think it would be.

     



  • @merreborn said:

    @Zecc said:

    It's AJAX without the X !

    So?  People have been doing iframe-based "AJAX without the X" since before the name "AJAX" was even invented.  The iframe solution was pretty much the best option available during the IE 5 days (circa 2000), since XMLHttpRequest wouldn't be widely available 'til 2002-ish.

    In fact, I don't think I've ever seen somebody do "AJAX" with the X. Pretty much everybody seems to either send plain text or HTML fragments. It's a retarded name made up by a marketing goon.



  • I know.  It doesn't require XML and it's not always asynchronous, so I guess it's just called Javascript.


  • @asuffield said:

    @merreborn said:

    @Zecc said:

    It's AJAX without the X !

    So?  People have been doing iframe-based "AJAX without the X" since before the name "AJAX" was even invented.  The iframe solution was pretty much the best option available during the IE 5 days (circa 2000), since XMLHttpRequest wouldn't be widely available 'til 2002-ish.

    In fact, I don't think I've ever seen somebody do "AJAX" with the X. Pretty much everybody seems to either send plain text or HTML fragments. It's a retarded name made up by a marketing goon.

    I've done AJAX with the X.  XHTML snippets can be embedded inside XML, which can then be injected into divs based on metadata contained in the XML.  It could fall back to standard HTML navigation if JavaScript was disabled. It was fully asynchronous as well.
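
    The pattern looked roughly like this (the element names and the helper are simplified for illustration, not the actual code):

        // Fetch an XML envelope, read the target div from its metadata, and inject
        // the embedded XHTML fragment into it. <target> and <fragment> are illustrative names.
        function loadSection(url) {
            var xhr = window.XMLHttpRequest ? new XMLHttpRequest()
                                            : new ActiveXObject('Microsoft.XMLHTTP');
            xhr.open('GET', url, true);                       // asynchronous
            xhr.onreadystatechange = function () {
                if (xhr.readyState !== 4 || xhr.status !== 200) return;
                var doc      = xhr.responseXML;
                var targetId = doc.getElementsByTagName('target')[0].firstChild.nodeValue;
                var fragment = doc.getElementsByTagName('fragment')[0].firstChild;
                // serialize the XHTML snippet and drop it into the named div
                // (IE would want fragment.xml instead of XMLSerializer)
                var html = window.XMLSerializer
                         ? new XMLSerializer().serializeToString(fragment)
                         : fragment.xml;
                document.getElementById(targetId).innerHTML = html;
            };
            xhr.send(null);
        }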

    The application?  A resume-type website, mainly just to show off for potential employers.



  • On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.



  • @merreborn said:

    @Zecc said:
    Can't do OO in javascript.
    Huh?

     I was being ironic.

     
    My point was that they are using fairly advanced javascript (XML-less, asynchronous-less AJAX, like Cap'n Steve pointed out) and yet they fail to create an object to group together the info about a document.

    They're storing different properties in different arrays! And when they add/remove a document, they have to add/remove elements in all of the arrays. And they use integer constants instead of string keys. That's the WTF.

    I've rewritten it so it uses objects:

    var oDoc = {
        rowid      : ...,
        docname    : ...,
        docclass   : ...,
        accesspath : ...,
        docid      : ...
    };
    top.aDocs[top.nDocs++] = oDoc;
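
    ...and removeRow() stops being five parallel splices (a sketch, reusing the names from the original):

        function removeRow(nDocRow) {
            var orow = TBODYAttach.rows.namedItem('Document' + nDocRow);
            TBODYAttach.deleteRow(orow.sectionRowIndex);

            for (var i = 0; i < top.aDocs.length; i++) {
                if (top.aDocs[i] && top.aDocs[i].rowid == nDocRow) {
                    top.aDocs.splice(i, 1);   // one splice instead of five
                    break;
                }
            }
        }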
    

     

    My post was too "raw" and I kinda got a bit off topic though, sorry. I guess I needed some venting to exorcise my frustrations, but that's not your fault. I apologize.

     



  • @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.



  • @PSWorx said:

    @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.

    Same here. It's pretty darned convenient on the whole, with the exception of getting errors from stupidly forgetting those damned commas.



  • @PSWorx said:

    @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.

    Googled for JSON, because I had no clue what it was. And now I know. It's JSAAN. It's not "Object", it's "Associative Array". Curious how there can be pitfalls? Or is it just as abused as XML? (The holy solution for all problems)



  • @Daid said:

    @PSWorx said:

    @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.

    Googled for JSON, because I had no clue what it was. And now I know. It's JSAAN. It's not "Object", it's "Associative Array". Curious how there can be pitfalls? Or is it just as abused as XML? (The holy solution for all problems)

    There are in fact exploits for JSON, as I imagine there are for XML; this link points them out.

    A good way to overcome this, as suggested by MS, is to add a /* comment marker in front of the returned JSON and strip it out before the eval.
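
    In code the idea is roughly this (a sketch of one common variant, not MS's actual implementation):

        // The server wraps its response:   /* {"account":"...","balance":1234} */
        // Included via a plain <script src="...">, that is just one big comment, so a
        // hostile page can't capture the data; the legitimate caller strips the
        // delimiters before parsing:
        function parseGuardedJSON(sResponse) {
            var body = sResponse.replace(/^\s*\/\*/, '').replace(/\*\/\s*$/, '');
            return eval('(' + body + ')');   // or, better, a real JSON parser
        }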



  • @Daid said:

    @PSWorx said:

    @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.

    Googled for JSON, because I had no clue what it was. And now I know. It's JSAAN. It's not "Object", it's "Associative Array". Curious how there can be pitfalls? Or is it just as abused as XML? (The holy solution for all problems)

    Well, sure, but we like to pretend that objects are different than arrays. :)

    The pitfalls revolve around the use of eval() to decode JSON.  Doug Crockford supplies a parser that turns JSON strings into objects without falling prey to evil eval() injection. 
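
    In practice that just means swapping the eval() call for the parser. A sketch, assuming Crockford's JSON library (json.js/json2.js, or a browser with a native JSON object) is loaded and xhr is a completed XMLHttpRequest:

        // Instead of:   var data = eval('(' + xhr.responseText + ')');
        var data = JSON.parse(xhr.responseText);

        // The parser validates the text first, so a booby-trapped response like
        //   {"name":"x"}; stealCookies()
        // throws a SyntaxError instead of running.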

     



  • @mrprogguy said:

    @Daid said:
    @PSWorx said:

    @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.

    Googled for JSON, because I had no clue what it was. And now I know. It's JSAAN. It's not "Object", it's "Associative Array". Curious how there can be pitfalls? Or is it just as abused as XML? (The holy solution for all problems)

    Well, sure, but we like to pretend that objects are different than arrays. :)

    The pitfalls revolve around the use of eval() to decode JSON.  Doug Crockford supplies a parser that turns JSON strings into objects without falling prey to evil eval() injection. 

     

    I've done a number of both AJAX (or should I say AJAJ?) and Flash projects using PHP/MySQL on the backend and JSON to transfer data both ways.

    Pitfalls are: 

    • For particularly large/complex objects, JSON decoding may freeze Flash. If you made your own XML/whatever parser, you could program in update intervals.
    • JSON is incredibly non-human-readable. After about 3 levels of nested brackets, you lose track of all structure. Debugging projects was only possible for me by using a Javascript source code formatter (like SourceFormatX...anybody know of a free one?).
    • As mrprogguy mentioned, there are the potential eval()-based injection attacks, but if you use a parser on the JS side, these are avoided completely.
    • Encoding special chars is a little tricky...especially in Flash (MySql (slash-escaped) --> PHP --> Javascript (slash-escaped) --> Flash (url-encoded) ). If you know your server-side language's functions well it's easier, at least.

    These are the only flaws I can think of. For the most part, it's a quick, easy, light-footprint, flexible way to transfer objects and information between a client/server, or between any two languages, for that matter.
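
    On the readability point: if Crockford's JSON library is already loaded, its JSON.stringify takes an indent argument, so a decoded blob can be re-dumped legibly without a separate formatter (a small sketch, assuming json2.js or a native JSON object):

        // Re-indent an unreadable JSON string for debugging.
        function prettyPrint(sJson) {
            return JSON.stringify(JSON.parse(sJson), null, 2);   // 2-space indent
        }

        // prettyPrint('{"a":{"b":[1,2]}}') returns:
        // {
        //   "a": {
        //     "b": [
        //       1,
        //       2
        //     ]
        //   }
        // }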


  • @PSWorx said:

    @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.

    Now that so many people have posted 'huh?', I feel like my explanation may make me seem dense. But here goes.

    (This is in the context of writing server-side scripts to return information to AJAX calls). 

    As one person mentioned, it can be unfriendly to the human eye. I usually don't care about this, until I have to find the missing comma. Or the extra comma.

    I came across JSON while writing Perl scripts to return tables that would be rendered on an HTML page asynchronously. I fell out of love with JSON because I was returning data like so (please forgive any syntax errors):

    {
        "result": {
            "table": {
                "row0": {
                    "field1": "Some data",
                    "field2": "More data",
                    "field3": "etc...."
                },
                "row1": {
                    ....
                }
            }
        }
    }

    Then, on the JavaScript side, I would have to build the table with either the nice DOM method (insertRow, etc.) or the document.write method.

    On the other hand, I could use Perl to build the table as one HTML text string, with all the styles, classes, etc., hand it back, and use innerHTML. Not as elegant, but in a time-sensitive environment, more satisfying.
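
    For comparison, the two client-side options look roughly like this (field names taken from the JSON above; the helpers themselves are just a sketch):

        // Option 1: DOM methods, one cell at a time
        function renderTableDOM(result, oTable) {
            for (var key in result.table) {
                var rowData = result.table[key];
                var row = oTable.insertRow(-1);   // append at the end
                row.insertCell(-1).appendChild(document.createTextNode(rowData.field1));
                row.insertCell(-1).appendChild(document.createTextNode(rowData.field2));
                row.insertCell(-1).appendChild(document.createTextNode(rowData.field3));
            }
        }

        // Option 2: the server sends finished markup and the client just drops it in
        function renderTableHTML(sHtmlFromServer, oContainer) {
            oContainer.innerHTML = sHtmlFromServer;   // styles, classes and all
        }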

    Also, we found that when the number of table rows gets up into the hundreds, DOM modifications (insertRow, deleteRow) can become pretty slow, especially in IE.

    JSON is nice for small, non-uniform result sets. Also, you can build whole data structures and functions with it, not just printable data. But my frustration came about from wanting to build fairly complex tables from a DB result set. (And when it came right down to it, we decided on page refreshes rather than asynchronous calls.)

     

            
     



  • @boolean said:

    • JSON is incredibly non-human-readable. After about 3 levels of nested brackets, you lose track of all structure. Debugging projects was only possible for me by using a Javascript source code formatter (like SourceFormatX...anybody know of a free one?).

    I'm not sure what to tell you.  JSON and YAML are modified S-Expression languages.  If they're properly organized (with relatively small tab sizes and well placed new lines), they're trivial to read.  Lispers and Schemers have been doing it since the 1960's.



  • @R.Flowers said:

    On the other hand, I could use Perl to build the table as one HTML text string, with all the styles, classes, etc., hand it back, and use innerHTML. Not as elegant, but in a time-sensitive environment, more satisfying.

    I would say that constructing the HTML on the server and sending it to a thin client is more elegant than building complexity on both ends. Server-side behaviour is more reliable anyway - the user could be using a browser with who knows what bugs in its javascript/DOM implementation, and the less you do in the part you cannot test, the better. 



  • @asuffield said:

    I would say that constructing the HTML on the server and sending it to a thin client is more elegant than building complexity on both ends. Server-side behaviour is more reliable anyway - the user could be using a browser with who knows what bugs in its javascript/DOM implementation, and the less you do in the part you cannot test, the better. 

    Score:5, insightful 



  • @asuffield said:

    @R.Flowers said:

    On the other hand, I could use Perl to build the table as one HTML text string, with all the styles, classes, etc., hand it back, and use innerHTML. Not as elegant, but in a time-sensitive environment, more satisfying.

    I would say that constructing the HTML on the server and sending it to a thin client is more elegant than building complexity on both ends. Server-side behaviour is more reliable anyway - the user could be using a browser with who knows what bugs in its javascript/DOM implementation, and the less you do in the part you cannot test, the better. 

    I would like to add some nuance to that. Sending data chunks instead of HTML saves processing time on the server and limits network traffic. The chance you hit an agent with bugs in its JS/DOM implementation is IMO smaller than hitting one with bugs in its tag soup parser, so I don't see a problem there.

    That said, you should avoid building HTML on either side. Inserting data chunks into semi-static HTML is better - although obviously not always possible. The middle road is to load semi-static HTML chunks separately from the dynamic data (JSON, XML, or ... I don't care); CSS and JS bits are all cached by the agent.

    Example: a real time chart

    1. The initial page has a button, "show chart"
    2. Hitting the button loads static HTML with an empty <canvas> element embedded in a <div> with fashionable styling and imagery, which is placed somewhere in the DOM
    3. Get the historic timeseries data (an intensive server-side query)
    4. Render the timeseries data in the canvas (intensive client-side)
    5. From then on, poll periodically for updates (latest value only - easy on both the client and the server)
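
    A bare-bones version of steps 3 to 5, just to make the shape concrete (the URLs and the drawSeries/drawPoint helpers are invented for the example):

        // One small helper does both the initial load and the periodic updates.
        function poll(url, onData) {
            var xhr = window.XMLHttpRequest ? new XMLHttpRequest()
                                            : new ActiveXObject('Microsoft.XMLHTTP');
            xhr.open('GET', url, true);
            xhr.onreadystatechange = function () {
                if (xhr.readyState === 4 && xhr.status === 200) {
                    onData(eval('(' + xhr.responseText + ')'));   // or a JSON parser, as discussed above
                }
            };
            xhr.send(null);
        }

        // Steps 3/4: the expensive historic query, fetched and drawn once.
        poll('/chart/history', function (series) { drawSeries(series); });

        // Step 5: afterwards, only the latest value, every few seconds.
        setInterval(function () {
            poll('/chart/latest', function (point) { drawPoint(point); });
        }, 5000);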


  • @mrprogguy said:

    @Daid said:
    @PSWorx said:

    @R.Flowers said:

    On a somewhat related note, I got very excited about using JSON with AJAX. Soon after, I got very frustrated.

    Why, by the way? From what I read so far, it looked like a very elegant solution (in theory). I'd be curious to know what the pitfalls are.

    Googled for JSON, because I had no clue what it was. And now I know. It's JSAAN. It's not "Object", it's "Associative Array". Curious how there can be pitfalls? Or is it just as abused as XML? (The holy solution for all problems)

    Well, sure, but we like to pretend that objects are different than arrays. :)

    The pitfalls revolve around the use of eval() to decode JSON.  Doug Crockford supplies a parser that turns JSON strings into objects without falling prey to evil eval() injection. 

     

     

    Actually, objects were proven vulnerable as well.

     



  • @JvdL said:

    I would like to add some nuance to that. Sending data chunks instead of HTML saves processing time on the server and limits network traffic. The chance you hit an agent with bugs in its JS/DOM implementation is IMO smaller than hitting one with bugs in its tag soup parser, so I don't see a problem there.

    That said, you should avoid building HTML on either side. Inserting data chunks into semi-static HTML is better - although obviously not always possible. The middle road is to load semi-static HTML chunks separately from the dynamic data (JSON, XML, or ... I don't care); CSS and JS bits are all cached by the agent.

    If the data chunk in your middle-ground solution is actual HTML, doesn't that mean you're building it on the server anyway, and that you've solutioned yourself right back to square one?



  • @dhromed said:

    @JvdL said:


    I would like to add some nuance to that. Sending data chunks instead of HTML saves processing time on the server and limits network traffic. The chance you hit an agent with bugs in its JS/DOM implementation is IMO smaller than hitting one with bugs in its tag soup parser, so I don't see a problem there.

    That said, you should avoid building HTML on either side. Inserting data chunks into semi-static HTML is better - although obviously not always possible. The middle road is to load semi-static HTML chunks separately from the dynamic data (JSON, XML, or ... I don't care); CSS and JS bits are all cached by the agent.


    If the data chunk in your middle-ground solution is actual HTML, doesn't that mean you're building it on the server anyway, and that you've solutioned yourself right back to square one?



    By data chunk I meant raw data without markup. Granted, encoding the data in JSON or XML also adds overhead, but in general that's minimal compared to data that's marked up for user consumption.

    Example: when I hit the "Quote" button on your post, it gave me 67851 bytes of HTML, whereas the dynamic data (the content of your post) was only 779 bytes. I'm afraid this ratio of roughly 1:100 for data:markup is typical. The WTF site could use a static 67K HTML page that would be loaded once and cached by the browser. Generating a "Quote" form would then amount to retrieving and sending only the 779 data bytes, almost verbatim, from the data store. That's a 99% reduction in workload!

    The chart example in the previous post makes this even clearer: sending raw numeric data without much ado is much faster than generating a dynamic PNG on the server, even more so if there are only small incremental data differences.

    The general idea is to distribute CPU time to the browser agent, which presumably most of the time isn't doing much more than sending photons to a human eye. And if that presumption is false, hey, screw your agent, not my server.

    A word of caution: the time spent processing text to generate HTML is usually dwarfed by the time spent retrieving data from a relational database, so don't scrap your 100-server cluster farm yet! But someone like asuffield could fix that.

